Dataset schema: one row per commit, with the following columns.

| column | type | observed values |
|---|---|---|
| commit | string | length 40 |
| old_file | string | length 4 to 118 |
| new_file | string | length 4 to 118 |
| old_contents | string | length 0 to 2.94k |
| new_contents | string | length 1 to 4.43k |
| subject | string | length 15 to 444 |
| message | string | length 16 to 3.45k |
| lang | string class | 1 distinct value |
| license | string class | 13 distinct values |
| repos | string | length 5 to 43.2k |
| prompt | string | length 17 to 4.58k |
| response | string | length 1 to 4.43k |
| prompt_tagged | string | length 58 to 4.62k |
| response_tagged | string | length 1 to 4.43k |
| text | string | length 132 to 7.29k |
| text_tagged | string | length 173 to 7.33k |
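
The six derived columns repeat the core fields verbatim: `prompt` is `old_contents` with the commit `message` appended, `response` equals `new_contents`, the `*_tagged` variants wrap the same strings in `<commit_before>`/`<commit_msg>`/`<commit_after>` markers, and `text`/`text_tagged` concatenate prompt and response. Because of this duplication, the sample records below list only the core fields. A minimal sketch of the relationship, assuming the dump corresponds to a loadable Hugging Face dataset (the dataset path is a hypothetical placeholder, and the concatenation is exact only up to whitespace):

```python
from datasets import load_dataset


def derive(row):
    # Rebuild the duplicated columns from the three core fields.
    before, msg, after = row["old_contents"], row["message"], row["new_contents"]
    prompt = before + msg
    prompt_tagged = f"<commit_before>{before}<commit_msg>{msg}<commit_after>"
    return {
        "prompt": prompt,
        "response": after,                 # identical to new_contents
        "prompt_tagged": prompt_tagged,
        "response_tagged": after,          # identical to new_contents
        "text": prompt + after,
        "text_tagged": prompt_tagged + after,
    }


# "namespace/commit-dataset" is a placeholder, not the real dataset id.
ds = load_dataset("namespace/commit-dataset", split="train")
ds = ds.map(derive)
```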
Sample records:

---

commit: e912f360ff66f2247223386e2b3600e1631a5a50
old_file: django_oauthlib/testrunner.py
new_file: django_oauthlib/testrunner.py
old_contents:

```python
import sys

from django.conf import settings

settings.configure(
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    ROOT_URLCONF='django_oauthlib.urls',
    INSTALLED_APPS=('django.contrib.auth', 'django.contrib.contenttypes', 'django_oauthlib',)
)


def run_tests():
    import django.test.utils
    runner_class = django.test.utils.get_runner(settings)
    test_runner = runner_class(verbosity=1, interactive=True)
    failures = test_runner.run_tests(['django_oauthlib'])
    sys.exit(failures)
```
new_contents:

```python
import logging
import sys

from django.conf import settings

settings.configure(
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    ROOT_URLCONF='django_oauthlib.urls',
    INSTALLED_APPS=(
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django_oauthlib',
    )
)


def run_tests():
    for log_id in ('oauthlib', 'django-oauthlib'):
        log = logging.getLogger(log_id)
        log.addHandler(logging.StreamHandler(sys.stdout))
        log.setLevel(logging.DEBUG)
    import django.test.utils
    runner_class = django.test.utils.get_runner(settings)
    test_runner = runner_class(verbosity=1, interactive=True)
    failures = test_runner.run_tests(['django_oauthlib'])
    sys.exit(failures)
```
subject: Add session and logging to test setup.
message: Add session and logging to test setup.
lang: Python
license: bsd-3-clause
repos: ib-lundgren/django-oauthlib

---

commit: c272e73c0d3112425e0ba25c58448f7c1d492d11
old_file: api/src/SearchApi.py
new_file: api/src/SearchApi.py
old_contents:

```python
from apiclient.discovery import build
import json

# Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps
# tab of
# https://cloud.google.com/console
# Please ensure that you have enabled the YouTube Data API for your project.
devKeyFile = open("search-api.key", "rb")
DEVELOPER_KEY = devKeyFile.read()
devKeyFile.close()

YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"


def youtubeSearch(query, maxResults=50):
    youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                    developerKey=DEVELOPER_KEY)

    # Call the search.list method to retrieve results matching the specified
    # query term.
    search_request = youtube.search().list(
        part="id,snippet",
        q=query,
        type="video",
        maxResults=maxResults,
        order="relevance"
    )
    search_response = json.dumps(search_request.execute(), separators=[',',':'])
    return search_response


if __name__ == "__main__":
    print youtubeSearch("paramore", 5)
```
new_contents:

```python
from apiclient.discovery import build
import json

# Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps
# tab of
# https://cloud.google.com/console
# Please ensure that you have enabled the YouTube Data API for your project.
devKeyFile = open("search-api.key", "rb")
DEVELOPER_KEY = devKeyFile.read()
devKeyFile.close()

YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"


def youtubeSearch(query, maxResults=50):
    youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                    developerKey=DEVELOPER_KEY)

    # Call the search.list method to retrieve results matching the specified
    # query term.
    search_request = youtube.search().list(
        part="id,snippet",
        q=query,
        type="video",
        maxResults=maxResults,
        order="relevance"
    )
    searchResponse = json.dumps(search_request.execute(), separators=[',',':'])
    searchData = json.loads(searchResponse)
    filteredData = filterData(searchData)
    filteredResponse = json.dumps(filteredData)
    return filteredResponse


def filterData(original):
    filtered = []
    for item in original["items"]:
        temp = {}
        temp["title"] = item["snippet"]["title"]
        temp["descriptipn"] = item["snippet"]["description"]
        temp["uploader"] = item["snippet"]["channelTitle"]
        temp["id"] = item["id"]["videoId"]
        filtered.append(temp)
    return filtered
```
subject: Update search api filter out unwanted information
message: Update search api filter out unwanted information
lang: Python
license: mit
repos: jghibiki/mopey,jghibiki/mopey,jghibiki/mopey,jghibiki/mopey,jghibiki/mopey

---

commit: 557c2ab8d6a0416219e3323427bd5e7bd735554f
old_file: nitroml/analytics/materialized_artifact.py
new_file: nitroml/analytics/materialized_artifact.py
old_contents:

```python
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# Lint as: python3
"""A generic Materialized Artifact definition."""

import abc

import pandas as pd


class MaterializedArtifact(abc.ABC):
  """Abstract base class for materialized artifacts.

  Represents an output of a tfx component that has been materialized on disk.
  Subclasses provide implementations to load and display a specific artifact
  type.
  """

  @abc.abstractmethod
  def show(self) -> None:
    """Displays respective visualization for artifact type."""

  @abc.abstractproperty
  def to_dataframe(self) -> pd.Dataframe:
    """Returns dataframe representation of the artifact."""
```
new_contents:

```python
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# Lint as: python3
"""A generic Materialized Artifact definition."""

import abc

import pandas as pd


class MaterializedArtifact(abc.ABC):
  """Abstract base class for materialized artifacts.

  Represents an output of a tfx component that has been materialized on disk.
  Subclasses provide implementations to load and display a specific artifact
  type.
  """

  @abc.abstractmethod
  def show(self) -> None:
    """Displays respective visualization for artifact type."""

  @abc.abstractmethod
  def to_dataframe(self) -> pd.DataFrame:
    """Returns dataframe representation of the artifact."""
```
subject: Fix case typo in 'to_dataframe' abstract method return type.
message: Fix case typo in 'to_dataframe' abstract method return type.
Also updates abstract property to abstract method.
PiperOrigin-RevId: 339248589
lang: Python
license: apache-2.0
repos: google/nitroml

---

commit: caefc30377f45ecdb955fec86e7de681a5cb6522
old_file: clean_txt_files.py
new_file: clean_txt_files.py
old_contents:

```python
"""
Script to clean the txt generated files and kept only the 37 class: LICENSE-PLATE and set the number 0 to the class.
"""
import os

dataset_dir = "data/dataset/"

for filename in list(filter(lambda x: x[-3:] == "txt", os.listdir(dataset_dir))):
    with open(dataset_dir + filename, 'r') as f:
        content = f.read()
    with open(dataset_dir + filename, 'w') as f:
        license_plate_lines = filter(lambda x: x.split(' ')[0] == "37", content.split('\n'))
        for line in license_plate_lines:
            line = line.split(' ')
            line[0] = "0"
            line = ' '.join(line)
            f.write(line)
```
new_contents:

```python
"""
Script to clean the txt generated files and kept only the 37 class: LICENSE-PLATE and set the number 0 to the class.
"""
import os

dataset_dir = "data/dataset/"

for filename in list(filter(lambda x: x[-3:] == "txt", os.listdir(dataset_dir))):
    with open(dataset_dir + filename, 'r') as f:
        content = f.read()
    with open(dataset_dir + filename, 'w') as f:
        license_plate_lines = filter(lambda x: x.split(' ')[0] == "37", content.split('\n'))
        for line in license_plate_lines:
            line = line.split(' ')
            line[0] = "0"
            line = ' '.join(line)
            f.write(line + '\n')
```
subject: Add break line on clean txt files
message: Add break line on clean txt files
lang: Python
license: mit
repos: SetaSouto/license-plate-detection

---

commit: 444bba442e581226b650af929c85ccc885c60297
old_file: microcosm/tracing.py
new_file: microcosm/tracing.py
old_contents:

```python
from jaeger_client.config import (
    DEFAULT_REPORTING_HOST,
    DEFAULT_REPORTING_PORT,
    DEFAULT_SAMPLING_PORT,
    Config,
)

from microcosm.api import binding, defaults, typed

SPAN_NAME = "span_name"


@binding("tracer")
@defaults(
    sample_type="ratelimiting",
    sample_param=typed(int, 10),
    sampling_port=typed(int, DEFAULT_SAMPLING_PORT),
    reporting_port=typed(int, DEFAULT_REPORTING_PORT),
    reporting_host=DEFAULT_REPORTING_HOST,
)
def configure_tracing(graph):
    """
    See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about
    available sampling strategies.
    """
    config = Config(
        config={
            "sampler": {
                "type": graph.config.tracer.sample_type,
                "param": graph.config.tracer.sample_param,
            },
            "local_agent": {
                "sampling_port": graph.config.tracer.sampling_port,
                "reporting_port": graph.config.tracer.reporting_port,
                "reporting_host": graph.config.tracer.reporting_host,
            },
            "logging": True,
        },
        service_name=graph.metadata.name,
    )
    return config.initialize_tracer()
```
new_contents:

```python
from jaeger_client.config import (
    DEFAULT_REPORTING_HOST,
    DEFAULT_REPORTING_PORT,
    DEFAULT_SAMPLING_PORT,
    Config,
)

from microcosm.api import binding, defaults, typed
from microcosm.config.types import boolean

SPAN_NAME = "span_name"


@binding("tracer")
@defaults(
    sample_type="ratelimiting",
    sample_param=typed(int, 10),
    sampling_port=typed(int, DEFAULT_SAMPLING_PORT),
    reporting_port=typed(int, DEFAULT_REPORTING_PORT),
    reporting_host=DEFAULT_REPORTING_HOST,
    logging_enabled=typed(boolean, False),
)
def configure_tracing(graph):
    """
    See https://www.jaegertracing.io/docs/1.12/sampling/ for more info about
    available sampling strategies.
    """
    config = Config(
        config={
            "sampler": {
                "type": graph.config.tracer.sample_type,
                "param": graph.config.tracer.sample_param,
            },
            "local_agent": {
                "sampling_port": graph.config.tracer.sampling_port,
                "reporting_port": graph.config.tracer.reporting_port,
                "reporting_host": graph.config.tracer.reporting_host,
            },
            "logging": graph.config.tracer.logging_enabled,
        },
        service_name=graph.metadata.name,
    )
    return config.initialize_tracer()
```
subject: Disable jaeger logging by default
message: Disable jaeger logging by default
lang: Python
license: apache-2.0
repos: globality-corp/microcosm,globality-corp/microcosm

---

commit: 864555f431a5dc0560e93ef9055e6cc49c499835
old_file: tests/test_basic_create.py
new_file: tests/test_basic_create.py
old_contents:

```python
from common import *


class TestBasicCreate(TestCase):

    def test_create_default_return(self):
        sg = Shotgun()
        type_ = 'Dummy' + mini_uuid().upper()
        spec = dict(name=mini_uuid())
        proj = sg.create(type_, spec)
        self.assertIsNot(spec, proj)
        self.assertEqual(len(proj), 2)
        self.assertEqual(proj['type'], type_)
        self.assert_(proj['id'])

    def test_create_additional_return(self):
        sg = Shotgun()
        type_ = 'Dummy' + mini_uuid().upper()
        name = mini_uuid()
        proj = sg.create(type_, dict(name=name), ['name'])
        self.assertEqual(len(proj), 3)
        self.assertEqual(proj['type'], type_)
        self.assert_(proj['id'])
        self.assertEqual(proj['name'], name)

    def test_create_missing_return(self):
        sg = Shotgun()
        type_ = 'Dummy' + mini_uuid().upper()
        name = mini_uuid()
        proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
        self.assertEqual(len(proj), 3)
        self.assertEqual(proj['type'], type_)
        self.assert_(proj['id'])
        self.assertEqual(proj['name'], name)
```
new_contents:

```python
from common import *


class TestBasicCreate(TestCase):

    def test_create_default_return(self):
        sg = Shotgun()
        type_ = 'Dummy' + mini_uuid().upper()
        spec = dict(name=mini_uuid())
        proj = sg.create(type_, spec)
        print proj
        self.assertIsNot(spec, proj)
        self.assertEqual(len(proj), 3)
        self.assertEqual(proj['type'], type_)
        self.assertEqual(proj['name'], spec['name'])
        self.assert_(proj['id'])

    def test_create_additional_return(self):
        sg = Shotgun()
        type_ = 'Dummy' + mini_uuid().upper()
        name = mini_uuid()
        proj = sg.create(type_, dict(name=name), ['name'])
        self.assertEqual(len(proj), 3)
        self.assertEqual(proj['type'], type_)
        self.assert_(proj['id'])
        self.assertEqual(proj['name'], name)

    def test_create_missing_return(self):
        sg = Shotgun()
        type_ = 'Dummy' + mini_uuid().upper()
        name = mini_uuid()
        proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
        self.assertEqual(len(proj), 3)
        self.assertEqual(proj['type'], type_)
        self.assert_(proj['id'])
        self.assertEqual(proj['name'], name)
```
subject: Adjust test for returned name
message: Adjust test for returned name
lang: Python
license: bsd-3-clause
repos: westernx/sgmock
|
from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 2)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
Adjust test for returned name
|
from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
print proj
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assertEqual(proj['name'], spec['name'])
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
|
<commit_before>from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 2)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
<commit_msg>Adjust test for returned name<commit_after>
|
from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
print proj
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assertEqual(proj['name'], spec['name'])
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
|
from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 2)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
Adjust test for returned namefrom common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
print proj
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assertEqual(proj['name'], spec['name'])
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
|
<commit_before>from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 2)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
<commit_msg>Adjust test for returned name<commit_after>from common import *
class TestBasicCreate(TestCase):
def test_create_default_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
spec = dict(name=mini_uuid())
proj = sg.create(type_, spec)
print proj
self.assertIsNot(spec, proj)
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assertEqual(proj['name'], spec['name'])
self.assert_(proj['id'])
def test_create_additional_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
def test_create_missing_return(self):
sg = Shotgun()
type_ = 'Dummy' + mini_uuid().upper()
name = mini_uuid()
proj = sg.create(type_, dict(name=name), ['name', 'does_not_exist'])
self.assertEqual(len(proj), 3)
self.assertEqual(proj['type'], type_)
self.assert_(proj['id'])
self.assertEqual(proj['name'], name)
|
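A minimal sketch of the create() contract the adjusted tests exercise: by default the returned dict carries 'type', 'id' and 'name', and extra requested fields are included only if they exist on the entity. MiniShotgun below is a hypothetical stand-in for illustration, not the mock from the repository under test.
import itertools
class MiniShotgun(object):
    _ids = itertools.count(1)  # shared counter is fine for a sketch
    def create(self, type_, data, return_fields=None):
        entity = dict(data)
        entity['type'] = type_
        entity['id'] = next(self._ids)
        # Default return keys plus any requested fields that exist;
        # unknown names like 'does_not_exist' are silently dropped.
        keys = set(['type', 'id', 'name']) | set(return_fields or [])
        return dict((k, entity[k]) for k in keys if k in entity)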
1b80972fe97bebbb20d9e6073b41d286f253c1ef
|
documents/views/utils.py
|
documents/views/utils.py
|
import mimetypes
import os
from django.http import HttpResponse
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
# remove the tmp file
os.remove(outputFile)
return response
|
import mimetypes
import os
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
wrapper = FileWrapper(file(outputFile))
response = HttpResponse(wrapper, mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
response['Content-Length'] = os.path.getsize(outputFile)
# remove the tmp file
os.remove(outputFile)
return response
|
Use FileWrapper to send files to browser in chunks of 8KB
|
Use FileWrapper to send files to browser in chunks of 8KB
Maybe this will fix a problem on the test server when it tries to send a huge file.
|
Python
|
agpl-3.0
|
sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer
|
import mimetypes
import os
from django.http import HttpResponse
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
# remove the tmp file
os.remove(outputFile)
return response
Use FileWrapper to send files to browser in chunks of 8KB
Maybe this will fix a problem on the test server when it tries to send a huge file.
|
import mimetypes
import os
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
wrapper = FileWrapper(file(outputFile))
response = HttpResponse(wrapper, mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
response['Content-Length'] = os.path.getsize(outputFile)
# remove the tmp file
os.remove(outputFile)
return response
|
<commit_before>import mimetypes
import os
from django.http import HttpResponse
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
# remove the tmp file
os.remove(outputFile)
return response
<commit_msg>Use FileWrapper to send files to browser in chunks of 8KB
Maybe this will fix a problem on the test server when it tries to send a huge file.<commit_after>
|
import mimetypes
import os
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
wrapper = FileWrapper(file(outputFile))
response = HttpResponse(wrapper, mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
response['Content-Length'] = os.path.getsize(outputFile)
# remove the tmp file
os.remove(outputFile)
return response
|
import mimetypes
import os
from django.http import HttpResponse
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
# remove the tmp file
os.remove(outputFile)
return response
Use FileWrapper to send files to browser in chunks of 8KB
Maybe this will fix a problem on the test server when it tries to send a huge file.
import mimetypes
import os
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
wrapper = FileWrapper(file(outputFile))
response = HttpResponse(wrapper, mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
response['Content-Length'] = os.path.getsize(outputFile)
# remove the tmp file
os.remove(outputFile)
return response
|
<commit_before>import mimetypes
import os
from django.http import HttpResponse
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
# remove the tmp file
os.remove(outputFile)
return response
<commit_msg>Use FileWrapper to send files to browser in chunks of 8KB
Maybe this will fix a problem on the test server when it tries to send a huge file.<commit_after>import mimetypes
import os
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
mimetypes.init()
mimetypes.add_type('application/epub+zip','.epub')
mimetypes.add_type('text/x-brl','.brl')
mimetypes.add_type('text/x-sbsform-g0','.bv')
mimetypes.add_type('text/x-sbsform-g1','.bv')
mimetypes.add_type('text/x-sbsform-g2','.bk')
def render_to_mimetype_response(mimetype, filename, outputFile):
ext = mimetypes.guess_extension(mimetype)
assert ext != None
wrapper = FileWrapper(file(outputFile))
response = HttpResponse(wrapper, mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s%s\"" % (filename, ext)
response['Content-Length'] = os.path.getsize(outputFile)
# remove the tmp file
os.remove(outputFile)
return response
|
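FileWrapper reads the file in fixed-size blocks (8 KB by default), which is where the chunk size in the commit message comes from. Note the patch deletes the temp file while the wrapper still holds an open handle; that works on POSIX, where the data stays readable until the descriptor closes, but would fail on Windows. The django.core.servers.basehttp import has since been removed from Django; a rough present-day equivalent, assuming nothing beyond stock Django, streams via StreamingHttpResponse:
import os
from django.http import StreamingHttpResponse
def render_file_response(path, content_type, chunk_size=8192):
    # Yield `path` in 8 KB blocks instead of loading it into memory.
    def chunks(fh):
        try:
            while True:
                block = fh.read(chunk_size)
                if not block:
                    break
                yield block
        finally:
            fh.close()
    response = StreamingHttpResponse(chunks(open(path, 'rb')),
                                     content_type=content_type)
    response['Content-Length'] = os.path.getsize(path)
    return response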
a12e3f0de9e8c10c279d795744f87b7e716bd34c
|
markitup_filebrowser/fields.py
|
markitup_filebrowser/fields.py
|
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
|
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
# allow South to handle MarkupField smoothly
try:
from south.modelsinspector import add_introspection_rules
# For a normal MarkupField, the add_rendered_field attribute is
# always True, which means no_rendered_field arg will always be
# True in a frozen MarkupField, which is what we want.
add_introspection_rules(rules=[((MarkupFilebrowserFiled,),
[],
{'no_rendered_field': ('add_rendered_field',
{})})],
patterns=['markitup_filebrowser\.fields\.'])
except ImportError:
pass
|
Allow south to handle MarkupFilebrowserFiled
|
Allow south to handle MarkupFilebrowserFiled
|
Python
|
bsd-3-clause
|
Iv/django-markiup-filebrowser,Iv/django-markiup-filebrowser
|
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
Allow south to handle MarkupFilebrowserFiled
|
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
# allow South to handle MarkupField smoothly
try:
from south.modelsinspector import add_introspection_rules
# For a normal MarkupField, the add_rendered_field attribute is
# always True, which means no_rendered_field arg will always be
# True in a frozen MarkupField, which is what we want.
add_introspection_rules(rules=[((MarkupFilebrowserFiled,),
[],
{'no_rendered_field': ('add_rendered_field',
{})})],
patterns=['markitup_filebrowser\.fields\.'])
except ImportError:
pass
|
<commit_before>from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}<commit_msg>Allow south to handle MarkupFilebrowserFiled<commit_after>
|
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
# allow South to handle MarkupField smoothly
try:
from south.modelsinspector import add_introspection_rules
# For a normal MarkupField, the add_rendered_field attribute is
# always True, which means no_rendered_field arg will always be
# True in a frozen MarkupField, which is what we want.
add_introspection_rules(rules=[((MarkupFilebrowserFiled,),
[],
{'no_rendered_field': ('add_rendered_field',
{})})],
patterns=['markitup_filebrowser\.fields\.'])
except ImportError:
pass
|
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
Allow south to handle MarkupFilebrowserFiled
from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
# allow South to handle MarkupField smoothly
try:
from south.modelsinspector import add_introspection_rules
# For a normal MarkupField, the add_rendered_field attribute is
# always True, which means no_rendered_field arg will always be
# True in a frozen MarkupField, which is what we want.
add_introspection_rules(rules=[((MarkupFilebrowserFiled,),
[],
{'no_rendered_field': ('add_rendered_field',
{})})],
patterns=['markitup_filebrowser\.fields\.'])
except ImportError:
pass
|
<commit_before>from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}<commit_msg>Allow south to handle MarkupFilebrowserFiled<commit_after>from markitup.fields import MarkupField
import widgets
class MarkupFilebrowserFiled(MarkupField):
def formfield(self, **kwargs):
defaults = {'widget': widgets.MarkitUpFilebrowserWiget}
defaults.update(kwargs)
return super(MarkupFilebrowserFiled, self).formfield(**defaults)
from django.contrib.admin.options import FORMFIELD_FOR_DBFIELD_DEFAULTS
FORMFIELD_FOR_DBFIELD_DEFAULTS[MarkupFilebrowserFiled] = {'widget': widgets.AdminMarkitUpFilebrowserWiget}
# allow South to handle MarkupField smoothly
try:
from south.modelsinspector import add_introspection_rules
# For a normal MarkupField, the add_rendered_field attribute is
# always True, which means no_rendered_field arg will always be
# True in a frozen MarkupField, which is what we want.
add_introspection_rules(rules=[((MarkupFilebrowserFiled,),
[],
{'no_rendered_field': ('add_rendered_field',
{})})],
patterns=['markitup_filebrowser\.fields\.'])
except ImportError:
pass
|
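South's add_introspection_rules() taught the pre-Django-1.7 migration engine how to freeze custom fields: the patterns argument scopes the rule to a module path, and the rules entry maps the field's add_rendered_field attribute onto the frozen no_rendered_field kwarg. Django 1.7+ replaced this mechanism with Field.deconstruct(); a minimal sketch for a hypothetical custom field:
from django.db import models
class MarkupishField(models.TextField):
    # Hypothetical field: modern Django introspects custom fields via
    # deconstruct() instead of South's add_introspection_rules().
    def __init__(self, *args, **kwargs):
        self.add_rendered_field = kwargs.pop('add_rendered_field', True)
        super(MarkupishField, self).__init__(*args, **kwargs)
    def deconstruct(self):
        name, path, args, kwargs = super(MarkupishField, self).deconstruct()
        kwargs['add_rendered_field'] = self.add_rendered_field
        return name, path, args, kwargs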
2ace9ce514d7299a8f3e8dca134a6e4eb3284937
|
parser2.py
|
parser2.py
|
from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
Move parsing loop into the class itself.
|
Move parsing loop into the class itself.
|
Python
|
mit
|
zimolzak/Ignition-poker-parser
|
from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
Move parsing loop into the class itself.
|
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
<commit_before>from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
<commit_msg>Move parsing loop into the class itself.<commit_after>
|
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
Move parsing loop into the class itself.
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
<commit_before>from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
<commit_msg>Move parsing loop into the class itself.<commit_after>from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
|
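The same segment-splitting approach reads a little more idiomatically in Python 3 with setattr and starred unpacking instead of assigning through self.__dict__; a sketch using the attribute names from the record:
class Hand:
    SEGMENTS = ('seats', 'preflop', 'flop', 'turn', 'river')
    def __init__(self, text):
        for name in self.SEGMENTS:
            setattr(self, name, None)
        # Not every hand reaches the river, but the summary is
        # always the final segment.
        *body, summary = text.split('\n*** ')
        for name, chunk in zip(self.SEGMENTS, body):
            setattr(self, name, chunk.splitlines())
        self.summary = summary.splitlines()
    def __repr__(self):
        return str(self.__dict__)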
eb5dc3ef7e7904549f50a4255477ed50d3ee53ab
|
twinsies/clock.py
|
twinsies/clock.py
|
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
|
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
print("Starting scheduler")
sched.start()
|
Reduce fetch size to 5000. Don't run job on startup.
|
Reduce fetch size to 5000. Don't run job on startup.
|
Python
|
mit
|
kkwteh/twinyewest
|
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
Reduce fetch size to 5000. Don't run job on startup.
|
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
print("Starting scheduler")
sched.start()
|
<commit_before>from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
<commit_msg>Reduce fetch size to 5000. Don't run job on startup.<commit_after>
|
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
print("Starting scheduler")
sched.start()
|
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
Reduce fetch size to 5000. Don't run job on startup.
from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
print("Starting scheduler")
sched.start()
|
<commit_before>from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=10000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
twinsy_finder()
print("Starting scheduler")
sched.start()
<commit_msg>Reduce fetch size to 5000. Don't run job on startup.<commit_after>from apscheduler.schedulers.blocking import BlockingScheduler
from twinsies.twitter import (random_trend_query, fetch_tweets, dig_for_twins,
update_status)
from memory_profiler import profile
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=16)
@profile
def twinsy_finder(fetch_size=5000):
print("Running twinsy finder...")
fetched_tweets = fetch_tweets('Kanye', fetch_size=fetch_size)
tweets = dig_for_twins(fetched_tweets)
if tweets:
print("Twins found, updating status.")
update_status(tweets)
else:
print("No twins found.")
if __name__ == '__main__':
print("Starting scheduler")
sched.start()
|
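With the manual twinsy_finder() call gone, the first run now happens a full 16 minutes after start-up. If an immediate first run were still wanted, APScheduler 3.x can express that declaratively through next_run_time rather than calling the job by hand; a sketch with a stubbed job body:
from datetime import datetime
from apscheduler.schedulers.blocking import BlockingScheduler
sched = BlockingScheduler()
def twinsy_finder(fetch_size=5000):
    print("Running twinsy finder...")  # stub body for the sketch
# next_run_time=now() fires once immediately, then every 16 minutes.
sched.add_job(twinsy_finder, 'interval', minutes=16,
              next_run_time=datetime.now())
# sched.start()  # blocks and runs the schedule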
796e734f67ea3c4afcb6c17204108d9b2c3d7120
|
website/forms.py
|
website/forms.py
|
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email
|
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email
def clean_email(self):
email = self.cleaned_data.get("email")
if self.instance.user.email != email:
try:
User.objects.get(email = email)
raise forms.ValidationError("Email taken.")
except User.DoesNotExist:
pass
return email
|
Validate email id, while editing user profile
|
Validate email id, while editing user profile
|
Python
|
agpl-3.0
|
CoderBounty/coderbounty,CoderBounty/coderbounty,atuljain/coderbounty,atuljain/coderbounty,atuljain/coderbounty,CoderBounty/coderbounty,atuljain/coderbounty,CoderBounty/coderbounty
|
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
            self.fields['email'].initial = user.email
Validate email id, while editing user profile
|
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email
def clean_email(self):
email = self.cleaned_data.get("email")
if self.instance.user.email != email:
try:
User.objects.get(email = email)
raise forms.ValidationError("Email taken.")
except User.DoesNotExist:
pass
return email
|
<commit_before>from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email<commit_msg>Validate email id, while editing user profile<commit_after>
|
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email
def clean_email(self):
email = self.cleaned_data.get("email")
if self.instance.user.email != email:
try:
User.objects.get(email = email)
raise forms.ValidationError("Email taken.")
except User.DoesNotExist:
pass
return email
|
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
            self.fields['email'].initial = user.email
Validate email id, while editing user profile
from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email
def clean_email(self):
email = self.cleaned_data.get("email")
if self.instance.user.email != email:
try:
User.objects.get(email = email)
raise forms.ValidationError("Email taken.")
except User.DoesNotExist:
pass
return email
|
<commit_before>from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email<commit_msg>Validate email id, while editing user profile<commit_after>from django import forms
from .models import Issue,Bounty,UserProfile
from django.contrib.auth.models import User
class IssueCreateForm(forms.ModelForm):
issueUrl = forms.CharField(label="issueUrl")
class Meta:
model = Issue
fields = ('title','language','content')
class BountyCreateForm(forms.ModelForm):
class Meta:
model = Bounty
fields = ('price',)
class UserProfileForm(forms.ModelForm):
user = forms.IntegerField(label="", widget=forms.HiddenInput(), required=False)
first_name = forms.CharField(label="First Name")
last_name = forms.CharField(required=False, label="Last Name")
email = forms.EmailField(label="Email", max_length=255)
class Meta:
model = UserProfile
exclude = ('balance',)
def __init__(self, *args, **kwargs):
super(UserProfileForm, self).__init__(*args, **kwargs)
user = kwargs['instance'].user
if user.pk:
self.fields['first_name'].initial = user.first_name
self.fields['last_name'].initial = user.last_name
self.fields['email'].initial = user.email
def clean_email(self):
email = self.cleaned_data.get("email")
if self.instance.user.email != email:
try:
User.objects.get(email = email)
raise forms.ValidationError("Email taken.")
except User.DoesNotExist:
pass
return email
|
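An equivalent clean_email() can avoid using an exception for control flow by asking QuerySet.exists(); this also sidesteps an unhandled MultipleObjectsReturned if duplicate emails already exist in the table. A sketch with the same names as the record:
def clean_email(self):
    email = self.cleaned_data.get("email")
    # One EXISTS query instead of a get() wrapped in try/except.
    if (email != self.instance.user.email
            and User.objects.filter(email=email).exists()):
        raise forms.ValidationError("Email taken.")
    return email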
7b9a04cb8655fad955829936c2b43b9ca37b3fe8
|
ckanext/ckanext-apicatalog_routes/ckanext/apicatalog_routes/db.py
|
ckanext/ckanext-apicatalog_routes/ckanext/apicatalog_routes/db.py
|
import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name)
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")
|
import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
state = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name,
state="pending")
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")
|
Add state column to user create api
|
Add state column to user create api
|
Python
|
mit
|
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
|
import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name)
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")Add state column to user create api
|
import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
state = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name,
state="pending")
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")
|
<commit_before>import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name)
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")<commit_msg>Add state column to user create api<commit_after>
|
import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
state = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name,
state="pending")
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")
|
import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name)
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")Add state column to user create apiimport uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
state = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name,
state="pending")
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")
|
<commit_before>import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name)
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")<commit_msg>Add state column to user create api<commit_after>import uuid
from ckan import model
from ckan.lib import dictization
from ckan.plugins import toolkit
from sqlalchemy import Column, types
from sqlalchemy.ext.declarative import declarative_base
import logging
log = logging.getLogger(__name__)
Base = declarative_base()
def make_uuid():
return unicode(uuid.uuid4())
class UserForOrganization(Base):
__tablename__ = 'user_for_organization'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
name = Column(types.UnicodeText, nullable=False)
email = Column(types.UnicodeText, nullable=False)
business_id = Column(types.UnicodeText, nullable=False)
organization_name = Column(types.UnicodeText, nullable=False)
state = Column(types.UnicodeText, nullable=False)
@classmethod
def create(cls, name, email, business_id, organization_name):
user_for_organization = UserForOrganization(name=name,
email=email,
business_id=business_id,
organization_name=organization_name,
state="pending")
model.Session.add(user_for_organization)
model.repo.commit()
def init_table(engine):
Base.metadata.create_all(engine)
log.info("Table for users for organization is set-up")
|
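Because state is declared nullable=False, Base.metadata.create_all() only helps fresh databases; an existing user_for_organization table needs an ALTER TABLE (or a proper migration) with a default so old rows stay valid. A sketch in plain SQLAlchemy, with an assumed connection URL:
from sqlalchemy import create_engine, text
engine = create_engine('postgresql:///ckan_default')  # assumed URL
with engine.begin() as conn:
    # One-off upgrade for tables created before the column existed.
    conn.execute(text(
        "ALTER TABLE user_for_organization "
        "ADD COLUMN state TEXT NOT NULL DEFAULT 'pending'"))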
98b0eb3d492cb816db7ffa7ad062dde36a1feadf
|
tests/unit/test_gettext.py
|
tests/unit/test_gettext.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
|
Use testtools as test base class.
|
Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup; addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98
|
Python
|
apache-2.0
|
varunarya10/oslo.i18n,openstack/oslo.i18n
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup; addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
<commit_msg>Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup; addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup; addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
<commit_msg>Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup; addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
|
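To make the migration guidance in the commit message concrete, here is a small hypothetical testtools case using addCleanup-backed fixtures for temp files; it is illustrative only and not part of the commit:
import fixtures
import testtools
class CleanupExampleTest(testtools.TestCase):
    def test_tempdir_is_cleaned_up(self):
        # useFixture() wires the fixture's cleanUp into addCleanup, so the
        # temporary directory is removed even when the test fails.
        tempdir = self.useFixture(fixtures.TempDir()).path
        self.assertTrue(tempdir)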
c65306f78f1eb97714fd2086d20ff781faf78c3a
|
problems/starterpackages/SteinerStarter.py
|
problems/starterpackages/SteinerStarter.py
|
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = [] # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
|
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
# Gets a problem from a file as a list of points.
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
# Outputs a list of edges to file "out.txt" for submission.
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = getProblem("st.txt") # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
outputSolutionsToFile(edges)
|
Make py starterpackage more like java/c++ one
|
Make py starterpackage more like java/c++ one
|
Python
|
mit
|
HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL,HMProgrammingClub/NYCSL
|
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = [] # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
Make py starterpackage more like java/c++ one
|
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
# Gets a problem from a file as a list of points.
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
# Outputs a list of edges to file "out.txt" for submission.
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = getProblem("st.txt") # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
outputSolutionsToFile(edges)
|
<commit_before>import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = [] # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
<commit_msg>Make py starterpackage more like java/c++ one<commit_after>
|
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
# Gets a problem from a file as a list of points.
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
# Outputs a list of edges to file "out.txt" for submission.
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = getProblem("st.txt") # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
outputSolutionsToFile(edges)
|
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = [] # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
Make py starterpackage more like java/c++ one
import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
# Gets a problem from a file as a list of points.
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
# Outputs a list of edges to file "out.txt" for submission.
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = getProblem("st.txt") # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
outputSolutionsToFile(edges)
|
<commit_before>import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = [] # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
<commit_msg>Make py starterpackage more like java/c++ one<commit_after>import math
import sys
# A helper class for working with points.
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
class Edge:
def __init__(self, p1, p2):
self.p1 = p1
self.p2 = p2
# Gets a problem from a file as a list of points.
def getProblem(filename):
pts = []
with open(filename, 'r') as input:
for line in input:
l = line.split(' ')
pts.append(Point(float(l[0]), float(l[1])))
return pts
# Outputs a list of edges to file "out.txt" for submission.
def outputSolutionsToFile(edges):
f = open("out.txt", 'w')
for a in edges:
f.write(str(a.p1.x) + ' ' + str(a.p1.y) + ' ' + str(a.p2.x) + ' ' + str(a.p2.y) + '\n')
pts = getProblem("st.txt") # Where you will find the input points.
edges = [] # Edges should be added to this.
# Your code here. This sample code just connects the points in the order that they are given:
for a in range(1, len(pts)):
edges.append(Edge(pts[a-1], pts[a]))
outputSolutionsToFile(edges)
|
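As a companion to the starter package, a short hypothetical scorer for the edges it emits (total Euclidean length), assuming the Point/Edge attributes defined in the record above; it is not part of the starter code:
import math
def total_length(edges):
    # Sum the Euclidean length of every edge in the candidate solution.
    return sum(math.hypot(e.p2.x - e.p1.x, e.p2.y - e.p1.y) for e in edges)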
865b9d8307f35203d7242e9c431ec2f6cb65c42e
|
whyis/manager.py
|
whyis/manager.py
|
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("listroutes", commands.ListRoutes())
self.add_command("interpret", commands.RunInterpreter())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("testagent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstallapp", commands.UninstallApp())
|
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("interpret", commands.ListRoutes())
self.add_command("list_routes", commands.ListRoutes())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("test_agent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstall_app", commands.UninstallApp())
|
Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
|
Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
This reverts commit 7827598d1060442570685e94633093c550ce7ff2.
|
Python
|
apache-2.0
|
tetherless-world/graphene,tetherless-world/graphene,tetherless-world/satoru,tetherless-world/satoru,tetherless-world/satoru,tetherless-world/graphene,tetherless-world/satoru,tetherless-world/graphene
|
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("listroutes", commands.ListRoutes())
self.add_command("interpret", commands.RunInterpreter())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("testagent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstallapp", commands.UninstallApp())
Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
This reverts commit 7827598d1060442570685e94633093c550ce7ff2.
|
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("interpret", commands.ListRoutes())
self.add_command("list_routes", commands.ListRoutes())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("test_agent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstall_app", commands.UninstallApp())
|
<commit_before># -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("listroutes", commands.ListRoutes())
self.add_command("interpret", commands.RunInterpreter())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("testagent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstallapp", commands.UninstallApp())
<commit_msg>Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
This reverts commit 7827598d1060442570685e94633093c550ce7ff2.<commit_after>
|
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("interpret", commands.ListRoutes())
self.add_command("list_routes", commands.ListRoutes())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("test_agent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstall_app", commands.UninstallApp())
|
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("listroutes", commands.ListRoutes())
self.add_command("interpret", commands.RunInterpreter())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("testagent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstallapp", commands.UninstallApp())
Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
This reverts commit 7827598d1060442570685e94633093c550ce7ff2.
# -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("interpret", commands.ListRoutes())
self.add_command("list_routes", commands.ListRoutes())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("test_agent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstall_app", commands.UninstallApp())
|
<commit_before># -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("listroutes", commands.ListRoutes())
self.add_command("interpret", commands.RunInterpreter())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("testagent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstallapp", commands.UninstallApp())
<commit_msg>Revert "Made commands consistent on use of underscores. Re-enabled 'interpret' command that had been misplaced."
This reverts commit 7827598d1060442570685e94633093c550ce7ff2.<commit_after># -*- coding:utf-8 -*-
import flask_script as script
from whyis import commands
from whyis.app_factory import app_factory
from whyis.config_utils import import_config_module
class Manager(script.Manager):
def __init__(self):
script.Manager.__init__(self, app_factory)
config = import_config_module()
self.add_option("-n", "--name", dest="app_name", required=False, default=config.project_name)
self.add_option("-c", "--config", dest="config", required=False, default=config.Dev)
self.add_command("configure", commands.Configure())
self.add_command("createuser", commands.CreateUser())
self.add_command("interpret", commands.ListRoutes())
self.add_command("list_routes", commands.ListRoutes())
self.add_command("load", commands.LoadNanopub())
self.add_command("retire", commands.RetireNanopub())
self.add_command("runserver", commands.WhyisServer())
self.add_command("test", commands.Test())
self.add_command("test_agent", commands.TestAgent())
self.add_command("updateuser", commands.UpdateUser())
self.add_command("uninstall_app", commands.UninstallApp())
|
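For reference, a minimal self-contained flask_script manager showing how command names registered via add_command or @command map to CLI invocations; this is an illustrative sketch, not code from the whyis repository:
from flask import Flask
import flask_script as script
app = Flask(__name__)
manager = script.Manager(app)
@manager.command
def hello():
    # Invoked as: python manage.py hello
    print("hello")
if __name__ == "__main__":
    manager.run()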
3f635db216c292c0eec720d28ecfbec3e23f1ca5
|
ynr/s3_storage.py
|
ynr/s3_storage.py
|
from storages.backends.s3boto3 import S3Boto3Storage
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class StaticStorage(PipelineMixin, ManifestFilesMixin, S3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(S3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
|
import os
from storages.backends.s3boto3 import S3Boto3Storage, SpooledTemporaryFile
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class PatchedS3Boto3Storage(S3Boto3Storage):
def _save_content(self, obj, content, parameters):
"""
We create a clone of the content file as when this is passed to boto3
        it wrongly closes the file upon upload whereas the storage backend
expects it to still be open
"""
# Seek our content back to the start
content.seek(0, os.SEEK_SET)
# Create a temporary file that will write to disk after a specified
# size
content_autoclose = SpooledTemporaryFile()
# Write our original content into our copy that will be closed by boto3
content_autoclose.write(content.read())
# Upload the object which will auto close the content_autoclose
# instance
super()._save_content(obj, content_autoclose, parameters)
        # Clean up; if this is fixed upstream, our duplicate should always close
if not content_autoclose.closed:
content_autoclose.close()
class StaticStorage(PipelineMixin, ManifestFilesMixin, PatchedS3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(PatchedS3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
|
Patch S3Boto3Storage to prevent closed file error when collecting static
|
Patch S3Boto3Storage to prevent closed file error when collecting static
This is copied from the aggregator API and prevents a bug where the
storage closes the files too early, raising a boto exception.
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
from storages.backends.s3boto3 import S3Boto3Storage
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class StaticStorage(PipelineMixin, ManifestFilesMixin, S3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(S3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
Patch S3Boto3Storage to prevent closed file error when collecting static
This is copied from the aggregator API and prevents a bug where the
storage closes the files too early, raising a boto exception.
|
import os
from storages.backends.s3boto3 import S3Boto3Storage, SpooledTemporaryFile
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class PatchedS3Boto3Storage(S3Boto3Storage):
def _save_content(self, obj, content, parameters):
"""
We create a clone of the content file as when this is passed to boto3
        it wrongly closes the file upon upload whereas the storage backend
expects it to still be open
"""
# Seek our content back to the start
content.seek(0, os.SEEK_SET)
# Create a temporary file that will write to disk after a specified
# size
content_autoclose = SpooledTemporaryFile()
# Write our original content into our copy that will be closed by boto3
content_autoclose.write(content.read())
# Upload the object which will auto close the content_autoclose
# instance
super()._save_content(obj, content_autoclose, parameters)
        # Clean up; if this is fixed upstream, our duplicate should always close
if not content_autoclose.closed:
content_autoclose.close()
class StaticStorage(PipelineMixin, ManifestFilesMixin, PatchedS3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(PatchedS3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
|
<commit_before>from storages.backends.s3boto3 import S3Boto3Storage
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class StaticStorage(PipelineMixin, ManifestFilesMixin, S3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(S3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
<commit_msg>Patch S3Boto3Storage to prevent closed file error when collecting static
This is copied from the aggregator API and prevents a bug where the
storage closes the files too early, raising a boto exception.<commit_after>
|
import os
from storages.backends.s3boto3 import S3Boto3Storage, SpooledTemporaryFile
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class PatchedS3Boto3Storage(S3Boto3Storage):
def _save_content(self, obj, content, parameters):
"""
We create a clone of the content file as when this is passed to boto3
        it wrongly closes the file upon upload whereas the storage backend
expects it to still be open
"""
# Seek our content back to the start
content.seek(0, os.SEEK_SET)
# Create a temporary file that will write to disk after a specified
# size
content_autoclose = SpooledTemporaryFile()
# Write our original content into our copy that will be closed by boto3
content_autoclose.write(content.read())
# Upload the object which will auto close the content_autoclose
# instance
super()._save_content(obj, content_autoclose, parameters)
        # Clean up; if this is fixed upstream, our duplicate should always close
if not content_autoclose.closed:
content_autoclose.close()
class StaticStorage(PipelineMixin, ManifestFilesMixin, PatchedS3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(PatchedS3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
|
from storages.backends.s3boto3 import S3Boto3Storage
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class StaticStorage(PipelineMixin, ManifestFilesMixin, S3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(S3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
Patch S3Boto3Storage to prevent closed file error when collecting static
This is copied from the aggregator API and prevents a bug where the
storage closes the files too early, raising a boto exception.
import os
from storages.backends.s3boto3 import S3Boto3Storage, SpooledTemporaryFile
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class PatchedS3Boto3Storage(S3Boto3Storage):
def _save_content(self, obj, content, parameters):
"""
We create a clone of the content file as when this is passed to boto3
        it wrongly closes the file upon upload whereas the storage backend
expects it to still be open
"""
# Seek our content back to the start
content.seek(0, os.SEEK_SET)
# Create a temporary file that will write to disk after a specified
# size
content_autoclose = SpooledTemporaryFile()
# Write our original content into our copy that will be closed by boto3
content_autoclose.write(content.read())
# Upload the object which will auto close the content_autoclose
# instance
super()._save_content(obj, content_autoclose, parameters)
        # Clean up; if this is fixed upstream, our duplicate should always close
if not content_autoclose.closed:
content_autoclose.close()
class StaticStorage(PipelineMixin, ManifestFilesMixin, PatchedS3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(PatchedS3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
|
<commit_before>from storages.backends.s3boto3 import S3Boto3Storage
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class StaticStorage(PipelineMixin, ManifestFilesMixin, S3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(S3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
<commit_msg>Patch S3Boto3Storage to prevent closed file error when collecting static
This is copied from the aggregator API and prevents a bug where the
storage closes the files too early, raising a boto exception.<commit_after>import os
from storages.backends.s3boto3 import S3Boto3Storage, SpooledTemporaryFile
from django.contrib.staticfiles.storage import ManifestFilesMixin
from pipeline.storage import PipelineMixin
from django.conf import settings
class PatchedS3Boto3Storage(S3Boto3Storage):
def _save_content(self, obj, content, parameters):
"""
We create a clone of the content file as when this is passed to boto3
        it wrongly closes the file upon upload whereas the storage backend
expects it to still be open
"""
# Seek our content back to the start
content.seek(0, os.SEEK_SET)
# Create a temporary file that will write to disk after a specified
# size
content_autoclose = SpooledTemporaryFile()
# Write our original content into our copy that will be closed by boto3
content_autoclose.write(content.read())
# Upload the object which will auto close the content_autoclose
# instance
super()._save_content(obj, content_autoclose, parameters)
        # Clean up; if this is fixed upstream, our duplicate should always close
if not content_autoclose.closed:
content_autoclose.close()
class StaticStorage(PipelineMixin, ManifestFilesMixin, PatchedS3Boto3Storage):
"""
Store static files on S3 at STATICFILES_LOCATION, post-process with pipeline
and then create manifest files for them.
"""
location = settings.STATICFILES_LOCATION
class MediaStorage(PatchedS3Boto3Storage):
"""
Store media files on S3 at MEDIAFILES_LOCATION
"""
location = settings.MEDIAFILES_LOCATION
@property
def base_url(self):
"""
        This is a small hack around the fact that Django Storages doesn't
provide the same methods as FileSystemStorage.
`base_url` is missing from their implementation of the storage class,
so we emulate it here by calling URL with an empty key name.
"""
return self.url("")
|
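The general pattern behind the patch, outside Django: boto3 closes the file object it uploads, so the caller hands it a disposable copy. A hypothetical standalone sketch (upload_fn stands in for any uploader that closes its argument):
import os
from tempfile import SpooledTemporaryFile
def upload_with_clone(upload_fn, content):
    # Rewind the caller's file, copy it into a throwaway spooled file,
    # and let the uploader close the copy; the original stays open.
    content.seek(0, os.SEEK_SET)
    clone = SpooledTemporaryFile()
    clone.write(content.read())
    clone.seek(0)
    upload_fn(clone)
    if not clone.closed:
        clone.close()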
7b939076fba1bb11d0ded504bcf10da457b3d092
|
scripts/add_identifiers_to_existing_preprints.py
|
scripts/add_identifiers_to_existing_preprints.py
|
import logging
import time
from website.app import init_app
from website.identifiers.utils import get_top_level_domain, request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
subdomain = get_top_level_domain(preprint.provider.external_url)
assert subdomain.upper() in doi.value
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint from service {}'.format(doi.value, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
|
import logging
import time
from website.app import init_app
from website.identifiers.utils import request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
|
Remove check for domain in DOI
|
Remove check for domain in DOI
|
Python
|
apache-2.0
|
mattclark/osf.io,crcresearch/osf.io,aaxelb/osf.io,saradbowman/osf.io,adlius/osf.io,mattclark/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,sloria/osf.io,mfraezz/osf.io,chrisseto/osf.io,pattisdr/osf.io,sloria/osf.io,adlius/osf.io,felliott/osf.io,cslzchen/osf.io,laurenrevere/osf.io,crcresearch/osf.io,saradbowman/osf.io,sloria/osf.io,felliott/osf.io,binoculars/osf.io,caneruguz/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,crcresearch/osf.io,adlius/osf.io,erinspace/osf.io,aaxelb/osf.io,baylee-d/osf.io,icereval/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,icereval/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,cwisecarver/osf.io,caneruguz/osf.io,cslzchen/osf.io,mfraezz/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,felliott/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,binoculars/osf.io,chennan47/osf.io,erinspace/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,caneruguz/osf.io,chrisseto/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,binoculars/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,erinspace/osf.io,mattclark/osf.io,baylee-d/osf.io,chrisseto/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,felliott/osf.io,cslzchen/osf.io,leb2dg/osf.io
|
import logging
import time
from website.app import init_app
from website.identifiers.utils import get_top_level_domain, request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
subdomain = get_top_level_domain(preprint.provider.external_url)
assert subdomain.upper() in doi.value
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint from service {}'.format(doi.value, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
Remove check for domain in DOI
|
import logging
import time
from website.app import init_app
from website.identifiers.utils import request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
|
<commit_before>import logging
import time
from website.app import init_app
from website.identifiers.utils import get_top_level_domain, request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
subdomain = get_top_level_domain(preprint.provider.external_url)
assert subdomain.upper() in doi.value
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint from service {}'.format(doi.value, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
<commit_msg>Remove check for domain in DOI<commit_after>
|
import logging
import time
from website.app import init_app
from website.identifiers.utils import request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
|
import logging
import time
from website.app import init_app
from website.identifiers.utils import get_top_level_domain, request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
subdomain = get_top_level_domain(preprint.provider.external_url)
assert subdomain.upper() in doi.value
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint from service {}'.format(doi.value, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
Remove check for domain in DOI
import logging
import time
from website.app import init_app
from website.identifiers.utils import request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
|
<commit_before>import logging
import time
from website.app import init_app
from website.identifiers.utils import get_top_level_domain, request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
subdomain = get_top_level_domain(preprint.provider.external_url)
assert subdomain.upper() in doi.value
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint from service {}'.format(doi.value, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
<commit_msg>Remove check for domain in DOI<commit_after>import logging
import time
from website.app import init_app
from website.identifiers.utils import request_identifiers_from_ezid
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def add_identifiers_to_preprints():
from osf.models import PreprintService
preprints_without_identifiers = PreprintService.objects.filter(identifiers__isnull=True)
logger.info('About to add identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
for preprint in preprints_without_identifiers:
logger.info('Saving identifier for preprint {} from source {}'.format(preprint._id, preprint.provider.name))
ezid_response = request_identifiers_from_ezid(preprint)
preprint.set_preprint_identifiers(ezid_response)
preprint.save()
doi = preprint.get_identifier('doi')
assert preprint._id.upper() in doi.value
logger.info('Created DOI {} for Preprint with guid {} from service {}'.format(doi.value, preprint._id, preprint.provider.name))
time.sleep(1)
logger.info('Finished Adding identifiers to {} preprints.'.format(preprints_without_identifiers.count()))
if __name__ == '__main__':
init_app(routes=False)
add_identifiers_to_preprints()
|
ade3a316166d3c4c362becd7880e60bd9387b259
|
courriers/management/commands/mailjet_sync_unsubscribed.py
|
courriers/management/commands/mailjet_sync_unsubscribed.py
|
from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list()
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(unsubscribed_users) - set(mailjet_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
|
from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--connection',
action='store',
dest='connection',
default=DEFAULT_DB_ALIAS,
),
)
def handle(self, *args, **options):
from courriers.backends import get_backend
from courriers.models import NewsletterSubscriber
self.connection = options.get('connection')
backend_klass = get_backend()
backend = backend_klass()
unsubscribed_users = (NewsletterSubscriber.objects.using(self.connection)
.filter(is_unsubscribed=True)
.values_list('email', flat=True)
.order_by('-unsubscribed_at'))
mailjet_contacts = backend.mailjet_api.contact.list(unsub=1)
mailjet_users = [contact['email'] for contact in mailjet_contacts['result']]
diff = list(set(mailjet_users) - set(unsubscribed_users))
print "%d contacts to unsubscribe" % len(diff)
for email in diff:
backend.unregister(email)
print "Unsubscribe user: %s" % email
|
Select only unsubscribed contacts from mailjet on sync script
|
Select only unsubscribed contacts from mailjet on sync script
|
Python
|
mit
|
ulule/django-courriers,ulule/django-courriers
|
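The interesting detail in this change is the direction of the set difference. With contact.list(unsub=1) Mailjet returns only contacts it already marks as unsubscribed, so set(mailjet_users) - set(unsubscribed_users) yields the contacts unsubscribed on Mailjet but not yet flagged locally, which is exactly the set the script must unregister. A minimal sketch with made-up addresses (the real values come from the ORM and the Mailjet API):

# Unsubscribed according to Mailjet, i.e. what contact.list(unsub=1) would return:
mailjet_users = ['a@example.com', 'b@example.com', 'c@example.com']
# Already unsubscribed locally:
unsubscribed_users = ['a@example.com']
diff = list(set(mailjet_users) - set(unsubscribed_users))
print(sorted(diff))  # ['b@example.com', 'c@example.com'] -> unregister these locally

The previous version computed the difference the other way around (local minus remote) against an unfiltered contact list, so contacts unsubscribed on Mailjet but still active locally were never picked up.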
be40174929193085ccd38683e64944fb4aabb26b
|
serial_reader.py
|
serial_reader.py
|
#!/usr/bin/env python
from argparse import ArgumentParser
import sys
import serial
def run(device, baud):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
if line:
sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
args = parser.parse_args()
run(args.device, args.baud)
|
#!/usr/bin/env python
from argparse import ArgumentParser
import sys
import serial
from datetime import datetime
def run(device, baud, prefix=None):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
if not line:
continue
if prefix:
line = prefix() + line
sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
parser.add_argument('-t', '--timestamp', dest='timestamp', action='store_true',
help="Add timestamp to start of each line")
args = parser.parse_args()
prefix = None
if args.timestamp:
prefix = lambda: datetime.now().strftime("[%H:%M:%S.%f] ")
run(args.device, args.baud, prefix)
|
Add option to timestamp each line from serial
|
Add option to timestamp each line from serial
|
Python
|
unlicense
|
recursify/serial-debug-tool
|
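Making the prefix a zero-argument callable instead of a fixed string is what keeps the timestamps fresh: run() evaluates it once per received line. The same hook accepts any other callable; a hypothetical line counter, for example (the script is Python 2 era, where readline() returns str, so the concatenation works as written):

from itertools import count

_n = count(1)
numbered = lambda: '[%04d] ' % next(_n)  # fresh number on every call
# run('/dev/tty.usbserial-A1', 74880, prefix=numbered)  # device path assumed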
f035ea7fb453d09b37f5187c4f61e855b048cbd5
|
aslo/web/__init__.py
|
aslo/web/__init__.py
|
from flask import Blueprint, g
web = Blueprint('web', __name__, template_folder='templates',
static_folder='static',
static_url_path='/web/static',
url_prefix='/<lang_code>')
@web.url_defaults
def add_language_code(endpoint, values):
values.setdefault('lang_code', g.lang_code)
@web.url_value_preprocessor
def pull_lang_code(point, values):
g.lang_code = values.pop('lang_code')
if not g.lang_code.strip():
print("No code :(")
g.lang_code = 'en'
from . import views # noqa
|
from flask import Blueprint, g, session
web = Blueprint('web', __name__, template_folder='templates',
static_folder='static',
static_url_path='/web/static',
url_prefix='/<lang_code>')
@web.url_defaults
def add_language_code(endpoint, values):
values.setdefault('lang_code', g.lang_code)
@web.url_value_preprocessor
def pull_lang_code(point, values):
g.lang_code = values.pop('lang_code')
# Tie user session to a particular language,
# so it can be retrived when we pop the request values
session['lang_code'] = g.lang_code
from . import views # noqa
|
Use sessions to tie the user to a language. Also helps us retrieve the language code later.
|
Use sessions to tie the user to a language. Also helps us retrieve the language code later.
|
Python
|
mit
|
jatindhankhar/aslo-v3,jatindhankhar/aslo-v3,jatindhankhar/aslo-v3,jatindhankhar/aslo-v3
|
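Stashing the code in the session is what makes it reachable from views that never receive a <lang_code> URL value, such as an error handler. A hypothetical consumer (the app and handler below are illustrative, not part of the project):

from flask import Flask, session

app = Flask(__name__)    # stands in for the real application that registers `web`
app.secret_key = 'dev'   # sessions require a secret key

@app.errorhandler(404)
def not_found(error):
    # the failing URL carries no <lang_code>, so fall back to the session copy
    lang = session.get('lang_code', 'en')
    return 'Not found (%s)' % lang, 404

Note the commit also drops the old blank-string fallback to 'en' inside pull_lang_code; downstream readers like this one now supply their own default via session.get.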
5d332259e16758bc43201073db91409390be9134
|
UM/Operations/GroupedOperation.py
|
UM/Operations/GroupedOperation.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
## Removes an operation from this group.
def removeOperation(self, index):
del self._children[index]
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import Operation
## An operation that groups several other operations together.
#
# The intent of this operation is to hide an underlying chain of operations
# from the user if they correspond to only one interaction with the user, such
# as an operation applied to multiple scene nodes or a re-arrangement of
# multiple items in the scene.
class GroupedOperation(Operation.Operation):
## Creates a new grouped operation.
#
# The grouped operation is empty after its initialisation.
def __init__(self):
super().__init__()
self._children = []
## Adds an operation to this group.
#
# The operation will be undone together with the rest of the operations in
# this group.
# Note that when the order matters, the operations are undone in reverse
# order as the order in which they are added.
def addOperation(self, op):
self._children.append(op)
## Undo all operations in this group.
#
# The operations are undone in reverse order as the order in which they
# were added.
def undo(self):
for op in reversed(self._children):
op.undo()
## Redoes all operations in this group.
def redo(self):
for op in self._children:
op.redo()
|
Remove removeOperation from grouped operation
|
Remove removeOperation from grouped operation
This function is never used and actually should never be used. The operation may not be modified after it is used, so removing an operation from the list makes no sense.
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
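The message states the contract that makes removeOperation dead weight: a group is assembled once, executed, and never edited afterwards. A toy sketch of that lifecycle (AppendOperation is invented for illustration; only GroupedOperation and the Operation base class come from Uranium, with import paths assumed from the package layout above):

from UM.Operations.GroupedOperation import GroupedOperation
from UM.Operations.Operation import Operation

class AppendOperation(Operation):
    # Minimal Operation subclass used only for this sketch.
    def __init__(self, log, value):
        super().__init__()
        self._log, self._value = log, value

    def redo(self):
        self._log.append(self._value)

    def undo(self):
        self._log.remove(self._value)

log = []
group = GroupedOperation()
for value in (1, 2, 3):
    group.addOperation(AppendOperation(log, value))
group.redo()  # log == [1, 2, 3], applied as one user-visible action
group.undo()  # children undone in reverse order, log == []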
ef75ec5d27fcbcee1b451b5e22828a1129cfd209
|
opps/boxes/models.py
|
opps/boxes/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'))
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#from django.conf import settings
#from django.utils.importlib import import_module
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Publishable, BaseBox
try:
OPPS_APPS = tuple([(u"{0}.{1}".format(
app._meta.app_label, app._meta.object_name), u"{0} - {1}".format(
app._meta.app_label, app._meta.object_name))
for app in models.get_models() if 'opps.' in app.__module__])
except ImportError:
OPPS_APPS = tuple([])
class QuerySet(Publishable):
name = models.CharField(_(u"Dynamic queryset name"), max_length=140)
slug = models.SlugField(
_(u"Slug"),
db_index=True,
max_length=150,
unique=True,
)
model = models.CharField(_(u'Model'), max_length=150, choices=OPPS_APPS)
limit = models.PositiveIntegerField(_(u'Limit'), default=7)
order = models.CharField(_('Order'), max_length=1, choices=(
('-', 'DESC'), ('+', 'ASC')))
class DynamicBox(BaseBox):
dynamicqueryset = models.ForeignKey(
'boxes.QuerySet',
verbose_name=_(u'Query Set')
)
|
Add default (7) to the limit field of the QuerySet model in boxes
|
Add default (7) to the limit field of the QuerySet model in boxes
|
Python
|
mit
|
YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,opps/opps,jeanmask/opps,jeanmask/opps,opps/opps
|
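A field default only changes what Django assigns when the value is omitted at the Python level; it does not rewrite existing rows, so the change implies no data migration by itself. In a configured Django shell the effect is visible on the field directly:

from opps.boxes.models import QuerySet

field = QuerySet._meta.get_field('limit')
print(field.default)        # 7
print(field.get_default())  # the value used when limit is left out on create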
38d7092f07884cb2530f95a5dc24ba177bfbe699
|
ncclient/operations/third_party/nexus/rpc.py
|
ncclient/operations/third_party/nexus/rpc.py
|
from lxml import etree
from ncclient.xml_ import *
from ncclient.operations.rpc import RPC
class ExecCommand(RPC):
def request(self, cmd):
parent_node = etree.Element(qualify('exec-command', NXOS_1_0))
child_node = etree.SubElement(parent_node, qualify('cmd', NXOS_1_0))
child_node.text = cmd
return self._request(parent_node)
|
from lxml import etree
from ncclient.xml_ import *
from ncclient.operations.rpc import RPC
class ExecCommand(RPC):
def request(self, cmds):
node = etree.Element(qualify('exec-command', NXOS_1_0))
for cmd in cmds:
etree.SubElement(node, qualify('cmd', NXOS_1_0)).text = cmd
return self._request(node)
|
Allow specifying multiple cmd elements
|
Allow specifying multiple cmd elements
|
Python
|
apache-2.0
|
nwautomator/ncclient,joysboy/ncclient,aitorhh/ncclient,ncclient/ncclient,vnitinv/ncclient,cmoberg/ncclient,earies/ncclient,einarnn/ncclient,leopoul/ncclient,kroustou/ncclient,lightlu/ncclient,nnakamot/ncclient,OpenClovis/ncclient,GIC-de/ncclient
|
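Because request() now iterates its argument, every command becomes its own <cmd> child under a single <exec-command> element. The tree can be built and inspected without a live NETCONF session (assuming qualify and NXOS_1_0 are exported by ncclient.xml_, as the wildcard import above implies):

from lxml import etree
from ncclient.xml_ import qualify, NXOS_1_0

node = etree.Element(qualify('exec-command', NXOS_1_0))
for cmd in ['conf t', 'interface eth1/1', 'shutdown']:  # sample NX-OS commands
    etree.SubElement(node, qualify('cmd', NXOS_1_0)).text = cmd
print(etree.tostring(node, pretty_print=True).decode())

One caveat of the new signature: a bare string is itself iterable, so passing a single command unwrapped would emit one <cmd> per character; callers must wrap a lone command in a list.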
0a152c792e2ebf20056780b5a20765175d73108b
|
ipv6map/geodata/admin.py
|
ipv6map/geodata/admin.py
|
from django.contrib import admin
from . import models
class BaseReadOnlyAdmin(admin.ModelAdmin):
list_display_links = None
def has_change_permission(self, request, obj=None):
return False if obj else True
@admin.register(models.Version)
class VersionAdmin(BaseReadOnlyAdmin):
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(BaseReadOnlyAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
|
from django.contrib import admin
from . import models
@admin.register(models.Version)
class VersionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['publish_date', 'location_count'],
}),
("Status", {
'fields': ['is_active'],
}),
]
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
readonly_fields = ['publish_date', 'location_count']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(admin.ModelAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_display_links = None
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
def has_change_permission(self, request, obj=None):
return False if obj else True
|
Allow toggling active/inactive in VersionAdmin
|
Allow toggling active/inactive in VersionAdmin
|
Python
|
unlicense
|
rlmuraya/ipv6map,rlmuraya/ipv6map,rlmuraya/ipv6map,rlmuraya/ipv6map
|
from django.contrib import admin
from . import models
class BaseReadOnlyAdmin(admin.ModelAdmin):
list_display_links = None
def has_change_permission(self, request, obj=None):
return False if obj else True
@admin.register(models.Version)
class VersionAdmin(BaseReadOnlyAdmin):
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(BaseReadOnlyAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
Allow toggling active/inactive in VersionAdmin
|
from django.contrib import admin
from . import models
@admin.register(models.Version)
class VersionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['publish_date', 'location_count'],
}),
("Status", {
'fields': ['is_active'],
}),
]
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
readonly_fields = ['publish_date', 'location_count']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(admin.ModelAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_display_links = None
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
def has_change_permission(self, request, obj=None):
return False if obj else True
|
<commit_before>from django.contrib import admin
from . import models
class BaseReadOnlyAdmin(admin.ModelAdmin):
list_display_links = None
def has_change_permission(self, request, obj=None):
return False if obj else True
@admin.register(models.Version)
class VersionAdmin(BaseReadOnlyAdmin):
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(BaseReadOnlyAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
<commit_msg>Allow toggling active/inactive in VersionAdmin<commit_after>
|
from django.contrib import admin
from . import models
@admin.register(models.Version)
class VersionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['publish_date', 'location_count'],
}),
("Status", {
'fields': ['is_active'],
}),
]
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
readonly_fields = ['publish_date', 'location_count']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(admin.ModelAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_display_links = None
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
def has_change_permission(self, request, obj=None):
return False if obj else True
|
from django.contrib import admin
from . import models
class BaseReadOnlyAdmin(admin.ModelAdmin):
list_display_links = None
def has_change_permission(self, request, obj=None):
return False if obj else True
@admin.register(models.Version)
class VersionAdmin(BaseReadOnlyAdmin):
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(BaseReadOnlyAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
Allow toggling active/inactive in VersionAdminfrom django.contrib import admin
from . import models
@admin.register(models.Version)
class VersionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['publish_date', 'location_count'],
}),
("Status", {
'fields': ['is_active'],
}),
]
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
readonly_fields = ['publish_date', 'location_count']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(admin.ModelAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_display_links = None
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
def has_change_permission(self, request, obj=None):
return False if obj else True
|
<commit_before>from django.contrib import admin
from . import models
class BaseReadOnlyAdmin(admin.ModelAdmin):
list_display_links = None
def has_change_permission(self, request, obj=None):
return False if obj else True
@admin.register(models.Version)
class VersionAdmin(BaseReadOnlyAdmin):
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(BaseReadOnlyAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
<commit_msg>Allow toggling active/inactive in VersionAdmin<commit_after>from django.contrib import admin
from . import models
@admin.register(models.Version)
class VersionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['publish_date', 'location_count'],
}),
("Status", {
'fields': ['is_active'],
}),
]
list_display = ['publish_date', 'location_count', 'is_active']
list_filter = ['is_active']
readonly_fields = ['publish_date', 'location_count']
def location_count(self, obj):
return obj.location_set.count()
@admin.register(models.Location)
class LocationAdmin(admin.ModelAdmin):
list_display = ['id', 'latitude', 'longitude', 'density', '_version']
list_display_links = None
list_filter = ['version']
def _version(self, obj):
return obj.version.publish_date
def has_change_permission(self, request, obj=None):
return False if obj else True
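
Note: the rewrite drops the shared BaseReadOnlyAdmin so Version objects open in a real change form again, while readonly_fields pins everything except is_active. A hypothetical models sketch consistent with the admin code (the real definitions live in ipv6map.geodata.models):

from django.db import models

class Version(models.Model):
    publish_date = models.DateField()
    is_active = models.BooleanField(default=False)  # the only field left editable

class Location(models.Model):
    version = models.ForeignKey(Version, on_delete=models.CASCADE)
    latitude = models.FloatField()
    longitude = models.FloatField()
    density = models.IntegerField()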
|
e486b4d2fe9fb788a027a354aa921dcaa9917946
|
tests/integrations/conftest.py
|
tests/integrations/conftest.py
|
import pytest
from tests.utils import FakeBugsnagServer
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
bugsnag.configure(app_type=None)
server.shutdown()
|
import pytest
from tests.utils import FakeBugsnagServer
import bugsnag.legacy as global_setup
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
# Reset shared client config
global_setup.configuration = bugsnag.Configuration()
global_setup.default_client.configuration = global_setup.configuration
server.shutdown()
|
Reset configuration fixture after each test
|
tests: Reset configuration fixture after each test
|
Python
|
mit
|
bugsnag/bugsnag-python,bugsnag/bugsnag-python
|
import pytest
from tests.utils import FakeBugsnagServer
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
bugsnag.configure(app_type=None)
server.shutdown()
tests: Reset configuration fixture after each test
|
import pytest
from tests.utils import FakeBugsnagServer
import bugsnag.legacy as global_setup
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
# Reset shared client config
global_setup.configuration = bugsnag.Configuration()
global_setup.default_client.configuration = global_setup.configuration
server.shutdown()
|
<commit_before>import pytest
from tests.utils import FakeBugsnagServer
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
bugsnag.configure(app_type=None)
server.shutdown()
<commit_msg>tests: Reset configuration fixture after each test<commit_after>
|
import pytest
from tests.utils import FakeBugsnagServer
import bugsnag.legacy as global_setup
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
# Reset shared client config
global_setup.configuration = bugsnag.Configuration()
global_setup.default_client.configuration = global_setup.configuration
server.shutdown()
|
import pytest
from tests.utils import FakeBugsnagServer
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
bugsnag.configure(app_type=None)
server.shutdown()
tests: Reset configuration fixture after each testimport pytest
from tests.utils import FakeBugsnagServer
import bugsnag.legacy as global_setup
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
# Reset shared client config
global_setup.configuration = bugsnag.Configuration()
global_setup.default_client.configuration = global_setup.configuration
server.shutdown()
|
<commit_before>import pytest
from tests.utils import FakeBugsnagServer
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
bugsnag.configure(app_type=None)
server.shutdown()
<commit_msg>tests: Reset configuration fixture after each test<commit_after>import pytest
from tests.utils import FakeBugsnagServer
import bugsnag.legacy as global_setup
import bugsnag
@pytest.fixture
def bugsnag_server():
server = FakeBugsnagServer()
bugsnag.configure(endpoint=server.url, api_key='3874876376238728937')
yield server
# Reset shared client config
global_setup.configuration = bugsnag.Configuration()
global_setup.default_client.configuration = global_setup.configuration
server.shutdown()
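
Note: rebuilding bugsnag.legacy's Configuration in teardown guarantees each test starts from library defaults instead of inheriting the fake endpoint. A sketch of a test consuming the fixture — whether FakeBugsnagServer records payloads, and under what attribute, is an assumption about tests.utils:

import bugsnag

def test_notify_reaches_fake_server(bugsnag_server):
    bugsnag.notify(RuntimeError('boom'))
    # Teardown rebuilds the global Configuration, so the endpoint and
    # api_key set here cannot leak into the next test.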
|
7bea8f5cb6f958225ce61a9f7ce439e9a80036ea
|
tests/unit/utils/cache_test.py
|
tests/unit/utils/cache_test.py
|
# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
assert isinstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
assert 'foo' not in cd
cd['foo'] = 'bar'
assert cd['foo'] == 'bar'
del cd['foo']
assert 'foo' not in cd
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
assert 'foo' in cd
assert cd['foo'] == 'bar'
time.sleep(0.1)
assert 'foo' not in cd
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
|
# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
self.assertIsInstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
self.assertEqual(cd['foo'], 'bar')
del cd['foo']
self.assertNotIn('foo', cd)
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
self.assertIn('foo', cd)
self.assertEqual(cd['foo'], 'bar')
time.sleep(0.1)
self.assertNotIn('foo', cd)
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
|
Change python asserts to unittest asserts
|
Change python asserts to unittest asserts
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
assert isinstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
assert 'foo' not in cd
cd['foo'] = 'bar'
assert cd['foo'] == 'bar'
del cd['foo']
assert 'foo' not in cd
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
assert 'foo' in cd
assert cd['foo'] == 'bar'
time.sleep(0.1)
assert 'foo' not in cd
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
Change python asserts to unittest asserts
|
# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
self.assertIsInstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
self.assertEqual(cd['foo'], 'bar')
del cd['foo']
self.assertNotIn('foo', cd)
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
self.assertIn('foo', cd)
self.assertEqual(cd['foo'], 'bar')
time.sleep(0.1)
self.assertNotIn('foo', cd)
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
|
<commit_before># -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
assert isinstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
assert 'foo' not in cd
cd['foo'] = 'bar'
assert cd['foo'] == 'bar'
del cd['foo']
assert 'foo' not in cd
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
assert 'foo' in cd
assert cd['foo'] == 'bar'
time.sleep(0.1)
assert 'foo' not in cd
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
<commit_msg>Change python asserts to unittest asserts<commit_after>
|
# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
self.assertIsInstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
self.assertEqual(cd['foo'], 'bar')
del cd['foo']
self.assertNotIn('foo', cd)
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
self.assertIn('foo', cd)
self.assertEqual(cd['foo'], 'bar')
time.sleep(0.1)
self.assertNotIn('foo', cd)
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
|
# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
assert isinstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
assert 'foo' not in cd
cd['foo'] = 'bar'
assert cd['foo'] == 'bar'
del cd['foo']
assert 'foo' not in cd
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
assert 'foo' in cd
assert cd['foo'] == 'bar'
time.sleep(0.1)
assert 'foo' not in cd
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
Change python asserts to unittest asserts# -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
self.assertIsInstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
self.assertEqual(cd['foo'], 'bar')
del cd['foo']
self.assertNotIn('foo', cd)
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
self.assertIn('foo', cd)
self.assertEqual(cd['foo'], 'bar')
time.sleep(0.1)
self.assertNotIn('foo', cd)
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
|
<commit_before># -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
assert isinstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
assert 'foo' not in cd
cd['foo'] = 'bar'
assert cd['foo'] == 'bar'
del cd['foo']
assert 'foo' not in cd
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
assert 'foo' in cd
assert cd['foo'] == 'bar'
time.sleep(0.1)
assert 'foo' not in cd
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
<commit_msg>Change python asserts to unittest asserts<commit_after># -*- coding: utf-8 -*-
'''
tests.unit.utils.cache_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test the salt cache objects
'''
# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
from salt.utils import cache
import time
class CacheDictTestCase(TestCase):
def test_sanity(self):
'''
Make sure you can instantiate etc.
'''
cd = cache.CacheDict(5)
self.assertIsInstance(cd, cache.CacheDict)
# do some tests to make sure it looks like a dict
self.assertNotIn('foo', cd)
cd['foo'] = 'bar'
self.assertEqual(cd['foo'], 'bar')
del cd['foo']
self.assertNotIn('foo', cd)
def test_ttl(self):
cd = cache.CacheDict(0.1)
cd['foo'] = 'bar'
self.assertIn('foo', cd)
self.assertEqual(cd['foo'], 'bar')
time.sleep(0.1)
self.assertNotIn('foo', cd)
# make sure that a get would get a regular old key error
self.assertRaises(KeyError, cd.__getitem__, 'foo')
if __name__ == '__main__':
from integration import run_tests
run_tests(CacheDictTestCase, needs_daemon=False)
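
Note: the unittest-style asserts (assertIsInstance, assertIn, assertEqual) report the offending values on failure where a bare assert only reports False. For reference, a minimal TTL mapping with the behaviour these tests exercise — assuming, as the tests imply, that CacheDict evicts entries on access once they are older than the TTL (a sketch, not salt's implementation):

import time

class TTLDict(dict):
    def __init__(self, ttl):
        super(TTLDict, self).__init__()
        self._ttl = ttl
        self._stamps = {}

    def __setitem__(self, key, value):
        self._stamps[key] = time.time()
        super(TTLDict, self).__setitem__(key, value)

    def __delitem__(self, key):
        self._stamps.pop(key, None)
        super(TTLDict, self).__delitem__(key)

    def _expire(self, key):
        # Evict lazily, on access, once the entry has outlived the TTL.
        if key in self._stamps and time.time() - self._stamps[key] >= self._ttl:
            del self[key]

    def __getitem__(self, key):
        self._expire(key)  # a subsequent lookup raises a plain KeyError
        return super(TTLDict, self).__getitem__(key)

    def __contains__(self, key):
        self._expire(key)
        return super(TTLDict, self).__contains__(key)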
|
091a08a8fe30c3cc00c6b85552e47a1b15b807b8
|
preferences/views.py
|
preferences/views.py
|
from django.shortcuts import render
# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
|
from django.shortcuts import render
from django.views.generic.edit import FormView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.forms import PreferencesForm
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
class UserPreferences(FormView):
template_name = 'preferences/preferences.html'
form_class = PreferencesForm
success_url = '/index/'
def form_valid(self, form):
return super(UserPreferences, self).form_valid(form)
|
Add userprefs and email reg view
|
Add userprefs and email reg view
|
Python
|
mit
|
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
|
from django.shortcuts import render
# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmailAdd userprefs and email reg view
|
from django.shortcuts import render
from django.views.generic.edit import FormView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.forms import PreferencesForm
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
class UserPreferences(FormView):
template_name = 'preferences/preferences.html'
form_class = PreferencesForm
success_url = '/index/'
def form_valid(self, form):
return super(UserPreferences, self).form_valid(form)
|
<commit_before>from django.shortcuts import render
# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail<commit_msg>Add userprefs and email reg view<commit_after>
|
from django.shortcuts import render
from django.views.generic.edit import FormView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.forms import PreferencesForm
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
class UserPreferences(FormView):
template_name = 'preferences/preferences.html'
form_class = PreferencesForm
success_url = '/index/'
def form_valid(self, form):
return super(UserPreferences, self).form_valid(form)
|
from django.shortcuts import render
# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmailAdd userprefs and email reg viewfrom django.shortcuts import render
from django.views.generic.edit import FormView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.forms import PreferencesForm
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
class UserPreferences(FormView):
template_name = 'preferences/preferences.html'
form_class = PreferencesForm
success_url = '/index/'
def form_valid(self, form):
return super(UserPreferences, self).form_valid(form)
|
<commit_before>from django.shortcuts import render
# Create your views here.
from registration.views import RegistrationView
from registration.forms import RegistrationFormUniqueEmail
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail<commit_msg>Add userprefs and email reg view<commit_after>from django.shortcuts import render
from django.views.generic.edit import FormView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.forms import PreferencesForm
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
class UserPreferences(FormView):
template_name = 'preferences/preferences.html'
form_class = PreferencesForm
success_url = '/index/'
def form_valid(self, form):
return super(UserPreferences, self).form_valid(form)
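
Note: UserPreferences currently just defers to FormView.form_valid(), which redirects to success_url; persisting the submitted choices would go in that hook. A hypothetical sketch of the form the view imports (the real one lives in preferences.forms):

from django import forms

class PreferencesForm(forms.Form):
    email_alerts = forms.BooleanField(required=False)
    digest_frequency = forms.ChoiceField(
        choices=[('daily', 'Daily'), ('weekly', 'Weekly')])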
|
5b877d2c42a44fb4ebd1c72f89a595ac5c095e07
|
wsgi/bufsm/mainapp/urls.py
|
wsgi/bufsm/mainapp/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
url(r'^test$', views.testLinha),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
]
|
Test in the original URL
|
Test in the original URL
|
Python
|
mit
|
bufsm/bufsm,bufsm/bufsm,bufsm/bufsm,bufsm/bufsm,bufsm/bufsm
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
url(r'^test$', views.testLinha),
]
Test in the original URL
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
url(r'^test$', views.testLinha),
]
<commit_msg>Test in the original URL<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
url(r'^test$', views.testLinha),
]
Test in the original URLfrom django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
url(r'^test$', views.testLinha),
]
<commit_msg>Test in the original URL<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha),
url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha),
]
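
Note: pointing the original /linha/<id> route at views.testLinha swaps the handler without changing the public URL. The named groups map straight onto view keyword arguments; these signatures are a sketch of what the views must accept, not the project's actual bodies:

from django.http import HttpResponse

def testLinha(request, idLinha):
    return HttpResponse('linha %s' % idLinha)

def writeLinha(request, idLinha, token, lat, lng):
    return HttpResponse('linha %s em (%s, %s)' % (idLinha, lat, lng))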
|
fc7ad7d55622aa9edb77b9f7822260110a772805
|
db.py
|
db.py
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Bring DB script into line with other prod scripts
|
Bring DB script into line with other prod scripts
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
Bring DB script into line with other prod scripts
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
<commit_before>from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<commit_msg>Bring DB script into line with other prod scripts<commit_after>
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
Bring DB script into line with other prod scriptsfrom flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
<commit_before>from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
secrets = getAllSecrets(region="eu-west-1")
for key, val in secrets.items():
os.environ[key] = val
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<commit_msg>Bring DB script into line with other prod scripts<commit_after>from flask.ext.script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import create_app, db
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_API_ENVIRONMENT'] = configs[environment]
application = create_app()
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
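
Note: the script now resolves its environment the same way the other production entry points do: a one-line marker file selects the config, with 'live' as the fallback on hosts where the file is absent. The selection logic in isolation:

import os

def read_environment(path='/home/ubuntu/environment', default='live'):
    # One line of text names the environment; a missing file means production.
    if os.path.isfile(path):
        with open(path, 'r') as environment_file:
            return environment_file.readline().strip()
    return default

print(read_environment())  # contents of the marker file, or 'live'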
|
cf7ed25377cb708d8d344bce406bd63fc4d22982
|
tests/query_test/test_udfs.py
|
tests/query_test/test_udfs.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
# This must run serially because other tests executing 'invalidate metadata' will nuke
# all loaded functions.
# TODO: This can be run in parallel once functions are persisted correctly.
@pytest.mark.execute_serially
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
|
Fix UDF test, take two
|
Fix UDF test, take two
Change-Id: I817389d94dab665199d2c1b7365e8ce0d1495c41
Reviewed-on: http://gerrit.ent.cloudera.com:8080/504
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Tested-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
|
Python
|
apache-2.0
|
michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
Fix UDF test, take two
Change-Id: I817389d94dab665199d2c1b7365e8ce0d1495c41
Reviewed-on: http://gerrit.ent.cloudera.com:8080/504
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Tested-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
# This must run serially because other tests executing 'invalidate metadata' will nuke
# all loaded functions.
# TODO: This can be run in parallel once functions are persisted correctly.
@pytest.mark.execute_serially
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
<commit_msg>Fix UDF test, take two
Change-Id: I817389d94dab665199d2c1b7365e8ce0d1495c41
Reviewed-on: http://gerrit.ent.cloudera.com:8080/504
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Tested-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com><commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
# This must run serially because other tests executing 'invalidate metadata' will nuke
# all loaded functions.
# TODO: This can be run in parallel once functions are persisted correctly.
@pytest.mark.execute_serially
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
Fix UDF test, take two
Change-Id: I817389d94dab665199d2c1b7365e8ce0d1495c41
Reviewed-on: http://gerrit.ent.cloudera.com:8080/504
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Tested-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
# This must run serially because other tests executing 'invalidate metadata' will nuke
# all loaded functions.
# TODO: This can be run in parallel once functions are persisted correctly.
@pytest.mark.execute_serially
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
<commit_msg>Fix UDF test, take two
Change-Id: I817389d94dab665199d2c1b7365e8ce0d1495c41
Reviewed-on: http://gerrit.ent.cloudera.com:8080/504
Reviewed-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com>
Tested-by: Skye Wanderman-Milne <6d4b168ab637b0a20cc9dbf96abb2537f372f946@cloudera.com><commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestUdfs(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestUdfs, cls).add_test_dimensions()
# UDFs require codegen
cls.TestMatrix.add_constraint(
lambda v: v.get_value('exec_option')['disable_codegen'] == False)
# There is no reason to run these tests using all dimensions.
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format == 'text' and\
v.get_value('table_format').compression_codec == 'none')
# This must run serially because other tests executing 'invalidate metadata' will nuke
# all loaded functions.
# TODO: This can be run in parallel once functions are persisted correctly.
@pytest.mark.execute_serially
def test_udfs(self, vector):
self.run_test_case('QueryTest/udf', vector)
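
Note: execute_serially is a project-specific marker, not a pytest builtin; pytest only honours it if the suite declares and enforces it somewhere. One plausible conftest.py declaration, sketched for illustration only:

def pytest_configure(config):
    config.addinivalue_line(
        'markers',
        'execute_serially: test must not run concurrently with other tests')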
|
f0dc039976831ece319cb3c4992af54ac3c4c62d
|
virtool/pathoscope/subtract.py
|
virtool/pathoscope/subtract.py
|
from virtool.pathoscope import sam
def run(isolate_sam, host_sam):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.high_score(l)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))
|
from virtool.pathoscope import sam
def run(isolate_sam, host_sam, snap=False):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam, snap=snap)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.get_score(l, snap=snap)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))
|
Add parameters for dealing with SNAP output to Pathoscope sam module
|
Add parameters for dealing with SNAP output to Pathoscope sam module
|
Python
|
mit
|
igboyes/virtool,virtool/virtool,virtool/virtool,igboyes/virtool
|
from virtool.pathoscope import sam
def run(isolate_sam, host_sam):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.high_score(l)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))Add parameters for dealing with SNAP output to Pathoscope sam module
|
from virtool.pathoscope import sam
def run(isolate_sam, host_sam, snap=False):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam, snap=snap)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.get_score(l, snap=snap)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))
|
<commit_before>from virtool.pathoscope import sam
def run(isolate_sam, host_sam):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.high_score(l)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))<commit_msg>Add parameters for dealing with SNAP output to Pathoscope sam module<commit_after>
|
from virtool.pathoscope import sam
def run(isolate_sam, host_sam, snap=False):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam, snap=snap)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.get_score(l, snap=snap)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))
|
from virtool.pathoscope import sam
def run(isolate_sam, host_sam):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.high_score(l)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))Add parameters for dealing with SNAP output to Pathoscope sam modulefrom virtool.pathoscope import sam
def run(isolate_sam, host_sam, snap=False):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam, snap=snap)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.get_score(l, snap=snap)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))
|
<commit_before>from virtool.pathoscope import sam
def run(isolate_sam, host_sam):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.high_score(l)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
    # Return the number of read mappings that were eliminated due to higher similarity to the host than to
    # the initially mapped virus
return subtracted_list, len((set(skipped)))<commit_msg>Add parameters for dealing with SNAP output to Pathoscope sam module<commit_after>from virtool.pathoscope import sam
def run(isolate_sam, host_sam, snap=False):
# Get a mapping score for every read mapped to the host genome
host_scores = sam.all_scores(host_sam, snap=snap)
# This list will contain the read_ids for all reads that had better mapping qualities against the host
# genome
skipped = list()
subtracted_list = list()
for line in isolate_sam:
# Parse the virus SAM file and get the alignment score for each line
if line[0] in ["#", "@"]:
subtracted_list.append(line)
continue
l = line.split("\t")
read_id = l[0]
virus_score = sam.get_score(l, snap=snap)
# Write each line from the virus SAM to a new file if its score is better against the virus
# than the host. Discard it if it isn't and add the read_id to the list of skipped reads
if virus_score is not None:
try:
if host_scores[read_id] >= virus_score:
subtracted_list.append(line)
else:
skipped.append(read_id)
except KeyError:
subtracted_list.append(line)
# Return the number of read mappings that were eliminated due to higher similarity to the host than to
# the initially mapped virus
return subtracted_list, len((set(skipped)))
|
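The subtraction above keeps a virus-mapped read only when its alignment score beats the read's score against the host, so everything depends on how `sam.get_score` extracts a score from a SAM line for each aligner. That module is not shown here; the sketch below is a hypothetical illustration of how a `snap` flag could switch the parsing, and the `AS:i:`/`NM:i:` tag choice and sign convention are assumptions rather than virtool's real code.

# Hypothetical sketch only; not the actual virtool.pathoscope.sam module.
def get_score(fields, snap=False):
    """Return an alignment score for one tab-split SAM line, or None."""
    if snap:
        # Assumption: score SNAP output by negated edit distance (NM:i:),
        # so that "bigger is better" still holds for the comparison.
        prefix, sign = "NM:i:", -1
    else:
        prefix, sign = "AS:i:", 1  # standard SAM alignment-score tag
    for field in fields[11:]:  # optional tags follow the 11 fixed columns
        if field.startswith(prefix):
            return sign * int(field[len(prefix):])
    return None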
2f67880e777c9efa5192f5c34ce5fc7d71fc0f08
|
partner_communication_switzerland/wizards/end_contract_wizard.py
|
partner_communication_switzerland/wizards/end_contract_wizard.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
child = self.child_id
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=child.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=self.contract_id.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
|
FIX end contract depart letter generation
|
FIX end contract depart letter generation
|
Python
|
agpl-3.0
|
eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
child = self.child_id
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=child.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
FIX end contract depart letter generation
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=self.contract_id.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
child = self.child_id
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=child.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
<commit_msg>FIX end contract depart letter generation<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=self.contract_id.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
child = self.child_id
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=child.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
FIX end contract depart letter generation# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=self.contract_id.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
child = self.child_id
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=child.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
<commit_msg>FIX end contract depart letter generation<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, fields, api
class EndContractWizard(models.TransientModel):
_inherit = 'end.contract.wizard'
generate_communication = fields.Boolean(
'Create depart communication')
@api.multi
def end_contract(self):
self.ensure_one()
if self.generate_communication:
exit_config = self.env.ref(
'partner_communication_switzerland.'
'lifecycle_child_unplanned_exit')
self.contract_id.with_context(
default_object_ids=self.contract_id.id,
default_auto_send=False).send_communication(exit_config)
return super(EndContractWizard, self).end_contract()
|
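The fix above comes down to which record id is put in `default_object_ids`: in Odoo, `default_<field>` keys in the context pre-fill that field on records created while the context is active, and the depart communication evidently needs the contract, not the child, behind `object_ids`. A generic sketch of that context mechanism, meant for an Odoo shell session with made-up example values:

# Odoo shell sketch of default_<field> context keys; values are examples.
partner = env['res.partner'].with_context(
    default_name='Example Partner',
).create({})
assert partner.name == 'Example Partner'  # `name` was filled from the context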
cb2f768f01cc3d40fe95574d0702470d480888c2
|
DTError.py
|
DTError.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
Allow passing None for function, and use the executable name in that case. Save error list anonymously
|
Allow passing None for function, and use the executable
name in that case.
Save error list anonymously
|
Python
|
bsd-3-clause
|
amaxwell/datatank_py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
Allow passing None for function, and use the executable
name in that case.
Save error list anonymously
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
<commit_msg>Allow passing None for function, and use the executable
name in that case.
Save error list anonymously<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
Allow passing None for function, and use the executable
name in that case.
Save error list anonymously#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile[name] = _errors
<commit_msg>Allow passing None for function, and use the executable
name in that case.
Save error list anonymously<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This software is under a BSD license. See LICENSE.txt for details.
__all__ = ["DTErrorMessage", "DTSaveError"]
import sys
import os
_errors = []
def DTErrorMessage(fcn, msg):
"""Accumulate a message and echo to standard error.
Arguments:
fcn -- typically a function or module name
msg -- an error or warning message
Returns:
Nothing
Typically you call this each time an error or warning
should be presented, then call DTSaveError before exiting.
"""
if fcn == None:
fcn = os.path.basename(sys.argv[0])
err_msg = "%s: %s" % (fcn, msg)
_errors.append(err_msg)
sys.stderr.write(err_msg + "\n")
def DTSaveError(datafile, name="ExecutionErrors"):
"""Save accumulated messages to a file.
Arguments:
datafile -- an open DTDataFile instance
name -- defaults to "ExecutionErrors" for DataTank
Returns:
Nothing
This will be displayed in DataTank's Messages panel.
"""
if len(_errors):
datafile.write_anonymous(_errors, name)
|
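With the change above, passing `fcn=None` substitutes the executable's basename, so standalone scripts no longer have to repeat their own name, and the accumulated list is stored via `write_anonymous` instead of item assignment. A short usage sketch, where the script name is just an example:

# Assuming the calling script was launched as "process.py":
DTErrorMessage(None, "grid file is missing")   # stderr: "process.py: grid file is missing"
DTErrorMessage("interpolate", "NaN in grid")   # stderr: "interpolate: NaN in grid"
# Before exiting, flush everything into an open DTDataFile:
# DTSaveError(datafile)  # stored anonymously under "ExecutionErrors"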
9e2728e7589deebce39ed6bca385f36c6c90f718
|
tca/chat/models.py
|
tca/chat/models.py
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
|
Add a timestamp field to the Message model
|
Add a timestamp field to the Message model
The timestamp is automatically assigned at the moment of creation
of the message. It is represented as a Month-based ISO8601
date-time format when sending the Message resource representation
in JSON.
Closes #1
|
Python
|
bsd-3-clause
|
mlalic/TumCampusAppBackend,mlalic/TumCampusAppBackend
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
Add a timestamp field to the Message model
The timestamp is automatically assigned at the moment of creation
of the message. It is represented as a Month-based ISO8601
date-time format when sending the Message resource representation
in JSON.
Closes #1
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
|
<commit_before>from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
<commit_msg>Add a timestamp field to the Message model
The timestamp is automatically assigned at the moment of creation
of the message. It is represented as a Month-based ISO8601
date-time format when sending the Message resource representation
in JSON.
Closes #1<commit_after>
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
Add a timestamp field to the Message model
The timestamp is automatically assigned at the moment of creation
of the message. It is represented as a Month-based ISO8601
date-time format when sending the Message resource representation
in JSON.
Closes #1from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
|
<commit_before>from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
<commit_msg>Add a timestamp field to the Message model
The timestamp is automatically assigned at the moment of creation
of the message. It is represented as a Month-based ISO8601
date-time format when sending the Message resource representation
in JSON.
Closes #1<commit_after>from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Member(models.Model):
lrz_id = models.CharField(max_length=7, unique=True)
first_name = models.CharField(max_length=30, blank=True)
last_name = models.CharField(max_length=30, blank=True)
def __str__(self):
return self.lrz_id
@python_2_unicode_compatible
class ChatRoom(models.Model):
name = models.CharField(max_length=100, unique=True)
members = models.ManyToManyField(Member, related_name='chat_rooms')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Message(models.Model):
text = models.TextField()
member = models.ForeignKey(Member, related_name='messages')
chat_room = models.ForeignKey(ChatRoom, related_name='messages')
timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{text} ({member})'.format(
text=self.text,
member=self.member
)
|
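A field with `auto_now_add=True` is stamped once, when the row is first inserted, and Django then treats it as non-editable; later `save()` calls leave it untouched (continuous updating would be `auto_now=True` instead). A Django-shell sketch, assuming a `member` and `room` record already exist:

msg = Message.objects.create(text='hello', member=member, chat_room=room)
created = msg.timestamp          # set automatically at INSERT time
msg.text = 'hello again'
msg.save()
assert msg.timestamp == created  # an auto_now_add field never changes afterwards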
5caa758b5638e0244da6818aa27092ad41801cc1
|
kazoo/tests/test_interrupt.py
|
kazoo/tests/test_interrupt.py
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
Add a sanity check per @bbangert
|
Add a sanity check per @bbangert
|
Python
|
apache-2.0
|
AlexanderplUs/kazoo,bsanders/kazoo,tempbottle/kazoo,rockerbox/kazoo,python-zk/kazoo,AlexanderplUs/kazoo,pombredanne/kazoo,Asana/kazoo,kormat/kazoo,harlowja/kazoo,rockerbox/kazoo,rgs1/kazoo,jacksontj/kazoo,max0d41/kazoo,bsanders/kazoo,rgs1/kazoo,tempbottle/kazoo,pombredanne/kazoo,harlowja/kazoo,max0d41/kazoo,kormat/kazoo,jacksontj/kazoo,python-zk/kazoo
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
Add a sanity check per @bbangert
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
<commit_before>import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
<commit_msg>Add a sanity check per @bbangert<commit_after>
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
Add a sanity check per @bbangertimport os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
<commit_before>import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
self.client.create(path, b"1")
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
self.client.get_children(path)
<commit_msg>Add a sanity check per @bbangert<commit_after>import os
from nose import SkipTest
from sys import platform
from kazoo.testing import KazooTestCase
class KazooInterruptTests(KazooTestCase):
def test_interrupted_systemcall(self):
'''
Make sure interrupted system calls don't break the world, since we can't
control what all signals our connection thread will get
'''
if 'linux' not in platform:
raise SkipTest('Unable to reproduce error case on non-linux platforms')
path = 'interrupt_test'
value = b"1"
self.client.create(path, value)
# set the euid to the current process' euid.
# glibc sends SIGRT to all children, which will interrupt the system call
os.seteuid(os.geteuid())
# basic sanity test that it worked alright
assert self.client.get(path)[0] == value
|
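The trigger works because glibc must apply the new credentials to every thread, so it interrupts them all with an internal realtime signal, and whatever blocking system call the connection thread is sitting in fails with EINTR. The classic client-side defence is the retry idiom below; this is the general pattern, not kazoo's actual connection loop, and since PEP 475 Python 3.5+ retries EINTR automatically for most calls:

import errno

def retry_on_eintr(func, *args, **kwargs):
    """Re-issue a system call until it completes without being interrupted."""
    while True:
        try:
            return func(*args, **kwargs)
        except (OSError, IOError) as exc:
            if exc.errno != errno.EINTR:
                raise  # a real failure, not just an interrupted call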
62c59e1efdd0a5c04bb3854dfb3f98ed5c237c21
|
skyfield/tests/test_almanac.py
|
skyfield/tests/test_almanac.py
|
from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
p = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (p == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
|
from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
f = almanac.fraction_illuminated(e, 'moon', t0[-1]).round(2)
assert f == 0.62
f = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (f == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
|
Add test for single-value fraction_illuminated()
|
Add test for single-value fraction_illuminated()
|
Python
|
mit
|
skyfielders/python-skyfield,skyfielders/python-skyfield
|
from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
p = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (p == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
Add test for single-value fraction_illuminated()
|
from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
f = almanac.fraction_illuminated(e, 'moon', t0[-1]).round(2)
assert f == 0.62
f = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (f == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
|
<commit_before>from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
p = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (p == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
<commit_msg>Add test for single-value fraction_illuminated()<commit_after>
|
from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
f = almanac.fraction_illuminated(e, 'moon', t0[-1]).round(2)
assert f == 0.62
f = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (f == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
|
from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
p = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (p == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
Add test for single-value fraction_illuminated()from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
f = almanac.fraction_illuminated(e, 'moon', t0[-1]).round(2)
assert f == 0.62
f = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (f == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
|
<commit_before>from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
p = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (p == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
<commit_msg>Add test for single-value fraction_illuminated()<commit_after>from skyfield import api, almanac
# http://aa.usno.navy.mil/cgi-bin/aa_moonill2.pl?form=1&year=2018&task=00&tz=-05
def test_fraction_illuminated():
ts = api.load.timescale()
t0 = ts.utc(2018, 9, range(9, 19), 5)
e = api.load('de421.bsp')
f = almanac.fraction_illuminated(e, 'moon', t0[-1]).round(2)
assert f == 0.62
f = almanac.fraction_illuminated(e, 'moon', t0).round(2)
assert (f == (0, 0, 0.03, 0.08, 0.15, 0.24, 0.33, 0.43, 0.52, 0.62)).all()
|
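The new first assertion exercises the scalar path: indexing the vector `Time` as `t0[-1]` yields a single-instant `Time`, for which `fraction_illuminated()` returns a plain float that can be compared with `==`, while the array result still needs `.all()`. Spelled out, the two paths agree element-wise:

f_scalar = almanac.fraction_illuminated(e, 'moon', t0[-1])  # a single float
f_array = almanac.fraction_illuminated(e, 'moon', t0)       # a NumPy array
assert round(f_scalar, 2) == round(f_array[-1], 2) == 0.62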
2cf9060db40e746eb49665b3eac83c72fd81d461
|
apigpio/utils.py
|
apigpio/utils.py
|
import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if tick - self.last > threshold:
self._fn(*args, **kwargs)
self.last = tick
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
|
import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
max_tick = 0xFFFFFFFF
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if self.last > tick:
delay = max_tick-self.last + tick
else:
delay = tick - self.last
if delay > threshold:
self._fn(*args, **kwargs)
print('call passed by debouncer {} {} {}'
.format(tick, self.last, threshold))
self.last = tick
else:
print('call filtered out by debouncer {} {} {}'
.format(tick, self.last, threshold))
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
|
Fix callback erroneously filtered out
|
Fix callback erroneously filtered out
The tick from pigpio wraps around after 0xFFFFFFFF,
approximately 1h13. When it wraps the delay was not computed
correctly, causing all following calls to be filtered out.
|
Python
|
mit
|
PierreRust/apigpio
|
import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if tick - self.last > threshold:
self._fn(*args, **kwargs)
self.last = tick
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
Fix callback erroneously filtered out
The tick from pigpio wraps around after 0xFFFFFFFF,
approximately 1h13. When it wraps the delay was not computed
correctly, causing all following calls to be filtered out.
|
import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
max_tick = 0xFFFFFFFF
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if self.last > tick:
delay = max_tick-self.last + tick
else:
delay = tick - self.last
if delay > threshold:
self._fn(*args, **kwargs)
print('call passed by debouncer {} {} {}'
.format(tick, self.last, threshold))
self.last = tick
else:
print('call filtered out by debouncer {} {} {}'
.format(tick, self.last, threshold))
def __get__(self, instance, type=None):
# with is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
|
<commit_before>import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if tick - self.last > threshold:
self._fn(*args, **kwargs)
self.last = tick
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
<commit_msg>Fix callback erroneously filtered out
The tick from pigpio wraps around after 0xFFFFFFFF,
approximately 1h13. When it wraps the delay was not computed
correctly, causing all following calls to be filtered out.<commit_after>
|
import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
max_tick = 0xFFFFFFFF
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if self.last > tick:
delay = max_tick-self.last + tick
else:
delay = tick - self.last
if delay > threshold:
self._fn(*args, **kwargs)
print('call passed by debouncer {} {} {}'
.format(tick, self.last, threshold))
self.last = tick
else:
print('call filtered out by debouncer {} {} {}'
.format(tick, self.last, threshold))
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
|
import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if tick - self.last > threshold:
self._fn(*args, **kwargs)
self.last = tick
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
Fix callback erroneously filtered out
The tick from pigpio wraps around after 0xFFFFFFFF,
approximately 1h13. When it wraps the delay was not computed
correctly, causing all following calls to be filtered out.import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
max_tick = 0xFFFFFFFF
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if self.last > tick:
delay = max_tick-self.last + tick
else:
delay = tick - self.last
if delay > threshold:
self._fn(*args, **kwargs)
print('call passed by debouncer {} {} {}'
.format(tick, self.last, threshold))
self.last = tick
else:
print('call filtered out by debouncer {} {} {}'
.format(tick, self.last, threshold))
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
|
<commit_before>import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if tick - self.last > threshold:
self._fn(*args, **kwargs)
self.last = tick
def __get__(self, instance, type=None):
# __get__ is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
<commit_msg>Fix callback erroneously filtered out
The tick from pigpio wraps around after 0xFFFFFFFF,
approximately 1h13. When it wraps the delay was not computed
correctly, causing all following calls to be filtered out.<commit_after>import functools
def Debounce(threshold=100):
"""
Simple debouncing decorator for apigpio callbacks.
Example:
`@Debounce()
def my_cb(gpio, level, tick):
print('gpio cb: {} {} {}'.format(gpio, level, tick))
`
The threshold can be given to the decorator as an argument (in millisec).
This decorator can be used both on function and object's methods.
Warning: as the debouncer uses the tick from pigpio, which wraps around
after approximately 1 hour 12 minutes, you could theoretically miss one
call if your callback is called twice with that interval.
"""
threshold *= 1000
max_tick = 0xFFFFFFFF
class _decorated(object):
def __init__(self, pigpio_cb):
self._fn = pigpio_cb
self.last = 0
self.is_method = False
def __call__(self, *args, **kwargs):
if self.is_method:
tick = args[3]
else:
tick = args[2]
if self.last > tick:
delay = max_tick-self.last + tick
else:
delay = tick - self.last
if delay > threshold:
self._fn(*args, **kwargs)
print('call passed by debouncer {} {} {}'
.format(tick, self.last, threshold))
self.last = tick
else:
print('call filtered out by debouncer {} {} {}'
.format(tick, self.last, threshold))
def __get__(self, instance, type=None):
# this is called when an instance of `_decorated` is used as a class
# attribute, which is the case when decorating a method in a class
self.is_method = True
return functools.partial(self, instance)
return _decorated
|
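A minimal standalone sketch of the wrap-aware delay computation this commit introduces (tick values below are made up; pigpio ticks are unsigned 32-bit microsecond counters):

MAX_TICK = 0xFFFFFFFF

def tick_delay(last, tick):
    # Mirrors the patched logic: when the tick counter has wrapped, the raw
    # difference would be negative, so add the counter range back in.
    if last > tick:
        return MAX_TICK - last + tick
    return tick - last

assert tick_delay(100, 600) == 500             # no wrap: plain difference
assert tick_delay(0xFFFFFF00, 0x100) == 0x1FF  # wrapped: still a small delay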
8a09e49cbcb9a874619b0e06601c2d69d5dad738
|
keystoneclient/__init__.py
|
keystoneclient/__init__.py
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
def _get_client_version():
"""Read version from versioninfo file."""
mod_abspath = inspect.getabsfile(inspect.currentframe())
client_path = os.path.dirname(mod_abspath)
version_path = os.path.join(client_path, 'versioninfo')
if os.path.exists(version_path):
version = open(version_path).read().strip()
else:
version = "Unknown, couldn't find versioninfo file at %s"\
% version_path
return version
__version__ = _get_client_version()
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo('python-keystoneclient').version_string()
|
Fix --version to output version
|
Fix --version to output version
Change-Id: I7d8dc83ac7c2ad7519633d136c1c32ce8537dce8
Fixes: bug 1182675
|
Python
|
apache-2.0
|
alexpilotti/python-keystoneclient,alexpilotti/python-keystoneclient,jamielennox/python-keystoneclient,ntt-sic/python-keystoneclient,klmitch/python-keystoneclient,metacloud/python-keystoneclient,sdpp/python-keystoneclient,ging/python-keystoneclient,sdpp/python-keystoneclient,ging/python-keystoneclient,darren-wang/ksc,Mercador/python-keystoneclient,magic0704/python-keystoneclient,Mercador/python-keystoneclient,ntt-sic/python-keystoneclient,jamielennox/python-keystoneclient,citrix-openstack-build/python-keystoneclient,citrix-openstack-build/python-keystoneclient,citrix-openstack-build/python-keystoneclient,darren-wang/ksc,ntt-sic/python-keystoneclient,magic0704/python-keystoneclient,klmitch/python-keystoneclient,metacloud/python-keystoneclient,jamielennox/python-keystoneclient
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
def _get_client_version():
"""Read version from versioninfo file."""
mod_abspath = inspect.getabsfile(inspect.currentframe())
client_path = os.path.dirname(mod_abspath)
version_path = os.path.join(client_path, 'versioninfo')
if os.path.exists(version_path):
version = open(version_path).read().strip()
else:
version = "Unknown, couldn't find versioninfo file at %s"\
% version_path
return version
__version__ = _get_client_version()
Fix --version to output version
Change-Id: I7d8dc83ac7c2ad7519633d136c1c32ce8537dce8
Fixes: bug 1182675
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo('python-keystoneclient').version_string()
|
<commit_before># Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
def _get_client_version():
"""Read version from versioninfo file."""
mod_abspath = inspect.getabsfile(inspect.currentframe())
client_path = os.path.dirname(mod_abspath)
version_path = os.path.join(client_path, 'versioninfo')
if os.path.exists(version_path):
version = open(version_path).read().strip()
else:
version = "Unknown, couldn't find versioninfo file at %s"\
% version_path
return version
__version__ = _get_client_version()
<commit_msg>Fix --version to output version
Change-Id: I7d8dc83ac7c2ad7519633d136c1c32ce8537dce8
Fixes: bug 1182675<commit_after>
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo('python-keystoneclient').version_string()
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
def _get_client_version():
"""Read version from versioninfo file."""
mod_abspath = inspect.getabsfile(inspect.currentframe())
client_path = os.path.dirname(mod_abspath)
version_path = os.path.join(client_path, 'versioninfo')
if os.path.exists(version_path):
version = open(version_path).read().strip()
else:
version = "Unknown, couldn't find versioninfo file at %s"\
% version_path
return version
__version__ = _get_client_version()
Fix --version to output version
Change-Id: I7d8dc83ac7c2ad7519633d136c1c32ce8537dce8
Fixes: bug 1182675# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo('python-keystoneclient').version_string()
|
<commit_before># Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
def _get_client_version():
"""Read version from versioninfo file."""
mod_abspath = inspect.getabsfile(inspect.currentframe())
client_path = os.path.dirname(mod_abspath)
version_path = os.path.join(client_path, 'versioninfo')
if os.path.exists(version_path):
version = open(version_path).read().strip()
else:
version = "Unknown, couldn't find versioninfo file at %s"\
% version_path
return version
__version__ = _get_client_version()
<commit_msg>Fix --version to output version
Change-Id: I7d8dc83ac7c2ad7519633d136c1c32ce8537dce8
Fixes: bug 1182675<commit_after># Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo('python-keystoneclient').version_string()
|
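For reference, the replacement resolves the version from installed package metadata instead of a bundled versioninfo file; a small usage sketch (the printed value depends on the installed distribution):

import pbr.version

info = pbr.version.VersionInfo('python-keystoneclient')
print(info.version_string())  # e.g. '0.2.3', read from package metadata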
c0640b6521d49e07681b0c43c2015c150dff32df
|
adhocracy/tests/model/test_user.py
|
adhocracy/tests/model/test_user.py
|
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
me.delegate_to_user_in_scope(delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
|
from adhocracy.model import Delegation
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
Delegation.create(me, delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
# fixme: atm that fails because the user does not have the
# vote.cast permission.
|
Update "can delegate via forward on user" test. Still fails because of permissions
|
Update "can delegate via forward on user" test. Still fails because of permissions
|
Python
|
agpl-3.0
|
phihag/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,phihag/adhocracy,SysTheron/adhocracy,alkadis/vcv,SysTheron/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,phihag/adhocracy,phihag/adhocracy,phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv
|
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
me.delegate_to_user_in_scope(delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
Update "can delegate via forward on user" test. Still fails because of permissions
|
from adhocracy.model import Delegation
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
Delegation.create(me, delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
# fixme: atm that fails because the user does not have the
# vote.cast permission.
|
<commit_before>from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
me.delegate_to_user_in_scope(delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
<commit_msg>Update "can delegate via forward on user" test. Still fails because of permissions<commit_after>
|
from adhocracy.model import Delegation
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
Delegation.create(me, delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
# fixme: atm that fails because the user does not have the
# vote.cast permission.
|
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
me.delegate_to_user_in_scope(delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
Update "can delegate via forward on user" test. Still fails because of permissionsfrom adhocracy.model import Delegation
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
Delegation.create(me, delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
# fixme: atm that fails because the user does not have the
# vote.cast permission.
|
<commit_before>from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
me.delegate_to_user_in_scope(delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
<commit_msg>Update "can delegate via forward on user" test. Still fails because of permissions<commit_after>from adhocracy.model import Delegation
from adhocracy.tests import TestController
from adhocracy.tests.testtools import tt_make_proposal, tt_make_user
class TestUserController(TestController):
def test_can_delegate_via_forward_on_user(self):
proposal = tt_make_proposal(voting=True)
me = tt_make_user()
delegate = tt_make_user()
Delegation.create(me, delegate, proposal)
self.assertEqual(delegate.number_of_votes_in_scope(proposal), 2)
# fixme: atm that fails because the user does not have the
# vote.cast permission.
|
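The test now goes through a factory on the model rather than a helper on User; a hypothetical sketch of what such a classmethod factory could look like (the real adhocracy model presumably also handles the database session and event hooks):

class Delegation(object):
    def __init__(self, principal, agent, scope):
        self.principal = principal  # user handing over their vote
        self.agent = agent          # user who receives the extra vote
        self.scope = scope          # proposal (or wider scope) it applies to

    @classmethod
    def create(cls, principal, agent, scope):
        # A real implementation would also persist the new object here.
        return cls(principal, agent, scope)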
0d2cb6f2091c01c9e57fd9b3c9d723b3e3d7080c
|
nova/objects/__init__.py
|
nova/objects/__init__.py
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
Add security_group_rule to objects registry
|
Add security_group_rule to objects registry
This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.
We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.
Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816
|
Python
|
apache-2.0
|
Metaswitch/calico-nova,barnsnake351/nova,MountainWei/nova,cloudbase/nova-virtualbox,hanlind/nova,maelnor/nova,jianghuaw/nova,zhimin711/nova,blueboxgroup/nova,TwinkleChawla/nova,dawnpower/nova,bgxavier/nova,thomasem/nova,j-carpentier/nova,tianweizhang/nova,openstack/nova,bigswitch/nova,openstack/nova,yosshy/nova,berrange/nova,phenoxim/nova,klmitch/nova,rajalokan/nova,rahulunair/nova,silenceli/nova,bigswitch/nova,mahak/nova,double12gzh/nova,yosshy/nova,varunarya10/nova_test_latest,luogangyi/bcec-nova,berrange/nova,mahak/nova,rahulunair/nova,cyx1231st/nova,shahar-stratoscale/nova,rahulunair/nova,alvarolopez/nova,affo/nova,virtualopensystems/nova,petrutlucian94/nova,belmiromoreira/nova,cloudbase/nova,devendermishrajio/nova,fnordahl/nova,mikalstill/nova,leilihh/novaha,alaski/nova,whitepages/nova,scripnichenko/nova,noironetworks/nova,zhimin711/nova,watonyweng/nova,ted-gould/nova,NeCTAR-RC/nova,Tehsmash/nova,mahak/nova,eharney/nova,CloudServer/nova,devendermishrajio/nova_test_latest,barnsnake351/nova,projectcalico/calico-nova,tudorvio/nova,alaski/nova,mmnelemane/nova,dawnpower/nova,orbitfp7/nova,blueboxgroup/nova,openstack/nova,vmturbo/nova,tealover/nova,cyx1231st/nova,watonyweng/nova,zzicewind/nova,tangfeixiong/nova,raildo/nova,dims/nova,apporc/nova,yatinkumbhare/openstack-nova,fnordahl/nova,alexandrucoman/vbox-nova-driver,rajalokan/nova,zaina/nova,CEG-FYP-OpenStack/scheduler,akash1808/nova,sebrandon1/nova,jianghuaw/nova,double12gzh/nova,eonpatapon/nova,Yusuke1987/openstack_template,tangfeixiong/nova,edulramirez/nova,Tehsmash/nova,mgagne/nova,j-carpentier/nova,viggates/nova,jianghuaw/nova,hanlind/nova,vladikr/nova_drafts,felixma/nova,scripnichenko/nova,JioCloud/nova_test_latest,cernops/nova,projectcalico/calico-nova,orbitfp7/nova,saleemjaveds/https-github.com-openstack-nova,affo/nova,tanglei528/nova,isyippee/nova,badock/nova,kimjaejoong/nova,sebrandon1/nova,thomasem/nova,mandeepdhami/nova,mandeepdhami/nova,takeshineshiro/nova,mikalstill/nova,isyippee/nova,tudorvio/nova,eayunstack/nova,joker946/nova,leilihh/nova,saleemjaveds/https-github.com-openstack-nova,klmitch/nova,dims/nova,ted-gould/nova,whitepages/nova,adelina-t/nova,mmnelemane/nova,Juniper/nova,edulramirez/nova,JioCloud/nova_test_latest,iuliat/nova,akash1808/nova_test_latest,JioCloud/nova,CiscoSystems/nova,badock/nova,mgagne/nova,akash1808/nova_test_latest,klmitch/nova,raildo/nova,alvarolopez/nova,BeyondTheClouds/nova,shail2810/nova,Juniper/nova,angdraug/nova,eayunstack/nova,MountainWei/nova,alexandrucoman/vbox-nova-driver,cloudbase/nova,nikesh-mahalka/nova,tealover/nova,CiscoSystems/nova,iuliat/nova,akash1808/nova,ruslanloman/nova,redhat-openstack/nova,Juniper/nova,LoHChina/nova,vladikr/nova_drafts,kimjaejoong/nova,noironetworks/nova,leilihh/novaha,leilihh/nova,Metaswitch/calico-nova,TwinkleChawla/nova,shail2810/nova,devendermishrajio/nova_test_latest,eharney/nova,shahar-stratoscale/nova,bgxavier/nova,gooddata/openstack-nova,maelnor/nova,jeffrey4l/nova,CCI-MOC/nova,mikalstill/nova,Francis-Liu/animated-broccoli,ruslanloman/nova,varunarya10/nova_test_latest,virtualopensystems/nova,CCI-MOC/nova,BeyondTheClouds/nova,Stavitsky/nova,JianyuWang/nova,cloudbase/nova-virtualbox,zaina/nova,devendermishrajio/nova,tanglei528/nova,apporc/nova,vmturbo/nova,rajalokan/nova,phenoxim/nova,JioCloud/nova,CEG-FYP-OpenStack/scheduler,BeyondTheClouds/nova,viggates/nova,Francis-Liu/animated-broccoli,Juniper/nova,gooddata/openstack-nova,cernops/nova,NeCTAR-RC/nova,klmitch/nova,redhat-openstack/nova,tianweizhang/nova,nikesh-mahalka/nova,silenceli/nova,angdraug/nova
,Stavitsky/nova,cernops/nova,sebrandon1/nova,gooddata/openstack-nova,vmturbo/nova,zzicewind/nova,belmiromoreira/nova,eonpatapon/nova,CloudServer/nova,cloudbase/nova,joker946/nova,rajalokan/nova,gooddata/openstack-nova,spring-week-topos/nova-week,jeffrey4l/nova,Yusuke1987/openstack_template,felixma/nova,jianghuaw/nova,yatinkumbhare/openstack-nova,spring-week-topos/nova-week,hanlind/nova,vmturbo/nova,JianyuWang/nova,takeshineshiro/nova,LoHChina/nova,petrutlucian94/nova,luogangyi/bcec-nova,adelina-t/nova
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
Add security_group_rule to objects registry
This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.
We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.
Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
<commit_before># Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
<commit_msg>Add security_group_rule to objects registry
This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.
We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.
Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816<commit_after>
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
Add security_group_rule to objects registry
This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.
We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.
Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
<commit_before># Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
<commit_msg>Add security_group_rule to objects registry
This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.
We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.
Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816<commit_after># Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
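A hypothetical miniature of the register-by-import pattern register_all() relies on: classes register themselves when their module is imported, so importing the module is all the registration a service needs (names below are illustrative, not nova's actual machinery):

REGISTRY = {}

def register(cls):
    # Runs at import time of the defining module, populating the registry.
    REGISTRY[cls.__name__] = cls
    return cls

@register
class SecurityGroupRule(object):  # stand-in for a nova object class
    pass

assert 'SecurityGroupRule' in REGISTRY  # resolvable when received over RPC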
638c1d92d82cfa5d8e9a3c5bfacee630282cc0a4
|
bot/multithreading/worker/pool/name_generator.py
|
bot/multithreading/worker/pool/name_generator.py
|
class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(temp,max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
|
class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
|
Remove "temp" from temporal workers, as they may have max_seconds_idle to None, not being temp at all
|
Remove "temp" from temporal workers, as they may have max_seconds_idle to None, not being temp at all
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(temp,max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
Remove "temp" from temporal workers, as they may have max_seconds_idle to None, not being temp at all
|
class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
|
<commit_before>class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(temp,max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
<commit_msg>Remove "temp" from temporal workers, as they may have max_seconds_idle to None, not being temp at all<commit_after>
|
class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
|
class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(temp,max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
Remove "temp" from temporal workers, as they may have max_seconds_idle to None, not being temp at allclass WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
|
<commit_before>class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(temp,max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
<commit_msg>Remove "temp" from temporal workers, as they may have max_seconds_idle to None, not being temp at all<commit_after>class WorkerPoolNameGenerator:
def __init__(self, base_name: str, max_workers: int, max_seconds_idle: int):
self.base_name = base_name
self.max_workers = max_workers
self.max_seconds_idle = max_seconds_idle
def get_name(self, number: int, is_temporal: bool = False):
name = self.base_name + "#{current}/{max}".format(current=number, max=self.max_workers)
if is_temporal:
name += "(max_idle:{max_seconds_idle}s)".format(max_seconds_idle=self.max_seconds_idle)
return name
|
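Usage sketch, assuming the WorkerPoolNameGenerator shown above is in scope (the argument values are made up):

gen = WorkerPoolNameGenerator('uploader', max_workers=4, max_seconds_idle=30)
print(gen.get_name(1))                    # uploader#1/4
print(gen.get_name(2, is_temporal=True))  # uploader#2/4(max_idle:30s)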
35f9c78274876c6eb1e487071c7957c9b8460f68
|
pecan/compat/__init__.py
|
pecan/compat/__init__.py
|
import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
sig = inspect._signature_from_callable(func, follow_wrapper_chains=False,
skip_bound_arg=False,
sigcls=inspect.Signature)
args = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
]
varargs = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_POSITIONAL
]
varargs = varargs[0] if varargs else None
varkw = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_KEYWORD
]
varkw = varkw[0] if varkw else None
defaults = [
p.default for p in sig.parameters.values()
if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
p.default is not p.empty)
] or None
if defaults is not None:
defaults = tuple(defaults)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
return ArgSpec(args, varargs, varkw, defaults)
|
import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
args, varargs, keywords, defaults = inspect.getfullargspec(func)[:4]
return ArgSpec(args=args, varargs=varargs, keywords=keywords,
defaults=defaults)
|
Simplify our argspec compatibility shim.
|
Simplify our argspec compatibility shim.
|
Python
|
bsd-3-clause
|
pecan/pecan,ryanpetrello/pecan,pecan/pecan,ryanpetrello/pecan
|
import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
sig = inspect._signature_from_callable(func, follow_wrapper_chains=False,
skip_bound_arg=False,
sigcls=inspect.Signature)
args = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
]
varargs = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_POSITIONAL
]
varargs = varargs[0] if varargs else None
varkw = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_KEYWORD
]
varkw = varkw[0] if varkw else None
defaults = [
p.default for p in sig.parameters.values()
if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
p.default is not p.empty)
] or None
if defaults is not None:
defaults = tuple(defaults)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
return ArgSpec(args, varargs, varkw, defaults)
Simplify our argspec compatibility shim.
|
import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
args, varargs, keywords, defaults = inspect.getfullargspec(func)[:4]
return ArgSpec(args=args, varargs=varargs, keywords=keywords,
defaults=defaults)
|
<commit_before>import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
sig = inspect._signature_from_callable(func, follow_wrapper_chains=False,
skip_bound_arg=False,
sigcls=inspect.Signature)
args = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
]
varargs = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_POSITIONAL
]
varargs = varargs[0] if varargs else None
varkw = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_KEYWORD
]
varkw = varkw[0] if varkw else None
defaults = [
p.default for p in sig.parameters.values()
if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
p.default is not p.empty)
] or None
if defaults is not None:
defaults = tuple(defaults)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
return ArgSpec(args, varargs, varkw, defaults)
<commit_msg>Simplify our argspec compatibility shim.<commit_after>
|
import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
args, varargs, keywords, defaults = inspect.getfullargspec(func)[:4]
return ArgSpec(args=args, varargs=varargs, keywords=keywords,
defaults=defaults)
|
import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
sig = inspect._signature_from_callable(func, follow_wrapper_chains=False,
skip_bound_arg=False,
sigcls=inspect.Signature)
args = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
]
varargs = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_POSITIONAL
]
varargs = varargs[0] if varargs else None
varkw = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_KEYWORD
]
varkw = varkw[0] if varkw else None
defaults = [
p.default for p in sig.parameters.values()
if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
p.default is not p.empty)
] or None
if defaults is not None:
defaults = tuple(defaults)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
return ArgSpec(args, varargs, varkw, defaults)
Simplify our argspec compatibility shim.import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
args, varargs, keywords, defaults = inspect.getfullargspec(func)[:4]
return ArgSpec(args=args, varargs=varargs, keywords=keywords,
defaults=defaults)
|
<commit_before>import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
sig = inspect._signature_from_callable(func, follow_wrapper_chains=False,
skip_bound_arg=False,
sigcls=inspect.Signature)
args = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
]
varargs = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_POSITIONAL
]
varargs = varargs[0] if varargs else None
varkw = [
p.name for p in sig.parameters.values()
if p.kind == inspect.Parameter.VAR_KEYWORD
]
varkw = varkw[0] if varkw else None
defaults = [
p.default for p in sig.parameters.values()
if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
p.default is not p.empty)
] or None
if defaults is not None:
defaults = tuple(defaults)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
return ArgSpec(args, varargs, varkw, defaults)
<commit_msg>Simplify our argspec compatability shim.<commit_after>import inspect
import six
if six.PY3:
import urllib.parse as urlparse
from urllib.parse import quote, unquote_plus
from urllib.request import urlopen, URLError
from html import escape
izip = zip
else:
import urlparse # noqa
from urllib import quote, unquote_plus # noqa
from urllib2 import urlopen, URLError # noqa
from cgi import escape # noqa
from itertools import izip
def is_bound_method(ob):
return inspect.ismethod(ob) and six.get_method_self(ob) is not None
def getargspec(func):
import sys
if sys.version_info < (3, 5):
return inspect.getargspec(func)
from collections import namedtuple
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
args, varargs, keywords, defaults = inspect.getfullargspec(func)[:4]
return ArgSpec(args=args, varargs=varargs, keywords=keywords,
defaults=defaults)
|
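A quick check of the shim on a sample function (hypothetical, and assumes the getargspec defined above is importable). On Python 3.5+ it delegates to inspect.getfullargspec(), whose first four fields line up with the legacy ArgSpec layout:

def sample(a, b=1, *rest, **opts):
    pass

spec = getargspec(sample)
assert spec.args == ['a', 'b']
assert spec.varargs == 'rest'
assert spec.keywords == 'opts'  # getfullargspec names this field varkw
assert spec.defaults == (1,)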
6432d92533953c2873b315945254e5260a109106
|
cs251tk/student/markdownify/check_submit_date.py
|
cs251tk/student/markdownify/check_submit_date.py
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
Modify way to find earliest date
|
Modify way to find earliest date
|
Python
|
mit
|
StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
Modify way to find earliest date
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
<commit_before>import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
<commit_msg>Modify way to find earliest date<commit_after>
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
Modify way to find earliest dateimport os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
<commit_before>import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
<commit_msg>Modify way to find earliest date<commit_after>import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
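The change above swaps dates.sort() followed by dates[0] for min(dates): the earliest date is found in a single O(n) pass instead of an O(n log n) sort of a list that is never used again. A minimal sketch of the behaviour, using hypothetical date strings rather than real git output:

from dateutil.parser import parse

# min() scans the parsed dates once; no full sort is needed.
raw = ["2016-02-03 14:00:00 +0000", "2016-01-15 09:30:00 +0000"]
dates = [parse(line) for line in raw]
print(min(dates).strftime("%x %X"))  # e.g. 01/15/16 09:30:00 (locale-dependent)

The edge case is unchanged by the commit: if no git log call succeeds, dates is empty and min(dates) raises ValueError much as dates[0] raised IndexError before.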
1b3f253070739aea28ef5c8729dd641eddcb9323
|
src/artgraph/plugins/plugin.py
|
src/artgraph/plugins/plugin.py
|
import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
|
import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
clean_title = title.replace(" ", "_")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (clean_title))
return mwparserfromhell.parse(cursor.fetchone()[0])
|
Clean titles before querying the DB
|
Clean titles before querying the DB
|
Python
|
mit
|
dMaggot/ArtistGraph
|
import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
Clean titles before querying the DB
|
import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
clean_title = title.replace(" ", "_")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (clean_title))
return mwparserfromhell.parse(cursor.fetchone()[0])
|
<commit_before>import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
<commit_msg>Clean titles before querying the DB<commit_after>
|
import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
clean_title = title.replace(" ", "_")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (clean_title))
return mwparserfromhell.parse(cursor.fetchone()[0])
|
import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
Clean titles before querying the DBimport MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
clean_title = title.replace(" ", "_")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (clean_title))
return mwparserfromhell.parse(cursor.fetchone()[0])
|
<commit_before>import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
<commit_msg>Clean titles before querying the DB<commit_after>import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
clean_title = title.replace(" ", "_")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (clean_title))
return mwparserfromhell.parse(cursor.fetchone()[0])
|
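The title cleaning above matches MediaWiki's storage convention: page.page_title stores underscores where display titles have spaces, so a lookup on the raw title silently returns no row. A tiny illustration with a hypothetical artist name:

# MediaWiki stores "Pink Floyd" as "Pink_Floyd" in page.page_title.
title = "Pink Floyd"
clean_title = title.replace(" ", "_")
assert clean_title == "Pink_Floyd"

The query keeps its %s placeholder, so escaping is still handled by the driver; passing the parameter as a one-element tuple, (clean_title,), would be the more conventional form, though MySQLdb also accepts the bare string here.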
f58940027a0e152ba68917a4b85dd1dfed1095a9
|
appname/server.py
|
appname/server.py
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
Add data: and unsafe-inline for base64 fonts and inline js
|
Add data: and unsafe-inline for base64 fonts and inline js
|
Python
|
mit
|
LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
Add data: and unsafe-inline for base64 fonts and inline js
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
<commit_before>from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
<commit_msg>Add data: and unsafe-inline for base64 fonts and inline js<commit_after>
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
Add data: and unsafe-inline for base64 fonts and inline jsfrom flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
<commit_before>from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
<commit_msg>Add data: and unsafe-inline for base64 fonts and inline js<commit_after>from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
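The relaxed policy above is a deliberate trade-off: 'unsafe-inline' re-enables the inline scripts and styles that CSP normally blocks, and data: allows base64-embedded fonts. A quick sanity check with Flask's test client, assuming the appname package from this diff is importable:

from appname import app

# The after_request hook should stamp the widened policy on every response.
with app.test_client() as client:
    resp = client.get('/')
    assert resp.headers['Content-Security-Policy'] == \
        "default-src 'self' 'unsafe-inline' data:"

A narrower alternative would scope each exception to its directive (for example font-src 'self' data: for the base64 fonts) instead of widening default-src for everything.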
32f06a7d3fc14600792a07bf00fab60af4ac395a
|
src/dashboard/src/contrib/utils.py
|
src/dashboard/src/contrib/utils.py
|
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
|
from django.shortcuts import render_to_response
from django.template.context import RequestContext
def render(request, template, context={}):
return render_to_response(template, context, context_instance=RequestContext(request))
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
|
Add wrapper for render_to_response to include tmpl context processors easily
|
Add wrapper for render_to_response to include tmpl context processors easily
Autoconverted from SVN (revision:2231)
|
Python
|
agpl-3.0
|
artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history
|
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
Add wrapper for render_to_response to include tmpl context processors easily
Autoconverted from SVN (revision:2231)
|
from django.shortcuts import render_to_response
from django.template.context import RequestContext
def render(request, template, context={}):
return render_to_response(template, context, context_instance=RequestContext(request))
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
|
<commit_before>def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
<commit_msg>Add wrapper for render_to_response to include tmpl context processors easily
Autoconverted from SVN (revision:2231)<commit_after>
|
from django.shortcuts import render_to_response
from django.template.context import RequestContext
def render(request, template, context={}):
return render_to_response(template, context, context_instance=RequestContext(request))
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
|
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
Add wrapper for render_to_response to include tmpl context processors easily
Autoconverted from SVN (revision:2231)from django.shortcuts import render_to_response
from django.template.context import RequestContext
def render(request, template, context={}):
return render_to_response(template, context, context_instance=RequestContext(request))
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
|
<commit_before>def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
<commit_msg>Add wrapper for render_to_response to include tmpl context processors easily
Autoconverted from SVN (revision:2231)<commit_after>from django.shortcuts import render_to_response
from django.template.context import RequestContext
def render(request, template, context={}):
return render_to_response(template, context, context_instance=RequestContext(request))
def get_directory_name(job):
"""
Expected format:
%sharedPath%watchedDirectories/workFlowDecisions/createDip/ImagesSIP-69826e50-87a2-4370-b7bd-406fc8aad94f/
"""
import re
directory = job.directory
uuid = job.sipuuid
try:
return re.search(r'^.*/(?P<directory>.*)-[\w]{8}(-[\w]{4}){3}-[\w]{12}[/]{0,1}$', directory).group('directory')
except:
pass
try:
return re.search(r'^.*/(?P<directory>.*)/$', directory).group('directory')
except:
pass
if directory:
return directory
else:
return uuid
|
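The new render wrapper exists because plain render_to_response skips the context processors configured in settings; passing context_instance=RequestContext(request) runs them. A hypothetical view using it (the import path is assumed from the file's location under the dashboard source tree):

from contrib.utils import render

def jobs_list(request):
    # Values injected by context processors arrive on top of this dict.
    return render(request, 'jobs/list.html', {'jobs': []})

Two hedged caveats on the surrounding code: the mutable default context={} is harmless only because the dict is never mutated, and the bare except: clauses in get_directory_name catch far more than the AttributeError raised when re.search returns None on a miss.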
d15c830111987388bec89c2549a16b809d656a83
|
jarn/mkrelease/scp.py
|
jarn/mkrelease/scp.py
|
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def has_host(self, location):
colon = location.find(':')
slash = location.find('/')
return colon > 0 and (slash < 0 or slash > colon)
def join(self, distbase, location):
sep = ''
if distbase and distbase[-1] not in (':', '/'):
sep = '/'
return distbase + sep + location
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'copying to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
|
from tempfile import NamedTemporaryFile
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
with NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('bye\n')
cmdfile = file.name
rc = self.process.os_system(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('sftp failed')
return rc
|
Add run_sftp and remove URL manipulation methods from SCP.
|
Add run_sftp and remove URL manipulation methods from SCP.
|
Python
|
bsd-2-clause
|
Jarn/jarn.mkrelease
|
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def has_host(self, location):
colon = location.find(':')
slash = location.find('/')
return colon > 0 and (slash < 0 or slash > colon)
def join(self, distbase, location):
sep = ''
if distbase and distbase[-1] not in (':', '/'):
sep = '/'
return distbase + sep + location
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'copying to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
Add run_sftp and remove URL manipulation methods from SCP.
|
from tempfile import NamedTemporaryFile
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
with NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('bye\n')
cmdfile = file.name
rc = self.process.os_system(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('sftp failed')
return rc
|
<commit_before>from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def has_host(self, location):
colon = location.find(':')
slash = location.find('/')
return colon > 0 and (slash < 0 or slash > colon)
def join(self, distbase, location):
sep = ''
if distbase and distbase[-1] not in (':', '/'):
sep = '/'
return distbase + sep + location
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'copying to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
<commit_msg>Add run_sftp and remove URL manipulation methods from SCP.<commit_after>
|
from tempfile import NamedTemporaryFile
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
with NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('bye\n')
cmdfile = file.name
rc = self.process.os_system(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('sftp failed')
return rc
|
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def has_host(self, location):
colon = location.find(':')
slash = location.find('/')
return colon > 0 and (slash < 0 or slash > colon)
def join(self, distbase, location):
sep = ''
if distbase and distbase[-1] not in (':', '/'):
sep = '/'
return distbase + sep + location
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'copying to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
Add run_sftp and remove URL manipulation methods from SCP.from tempfile import NamedTemporaryFile
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
with NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('bye\n')
cmdfile = file.name
rc = self.process.os_system(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('sftp failed')
return rc
|
<commit_before>from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def has_host(self, location):
colon = location.find(':')
slash = location.find('/')
return colon > 0 and (slash < 0 or slash > colon)
def join(self, distbase, location):
sep = ''
if distbase and distbase[-1] not in (':', '/'):
sep = '/'
return distbase + sep + location
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'copying to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
<commit_msg>Add run_sftp and remove URL manipulation methods from SCP.<commit_after>from tempfile import NamedTemporaryFile
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
with NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('bye\n')
cmdfile = file.name
rc = self.process.os_system(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('sftp failed')
return rc
|
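run_sftp drives sftp in batch mode through a temporary command file. One fragile spot worth flagging: NamedTemporaryFile buffers writes, so the child process can race the unflushed file. A standalone sketch with an explicit flush; the distfile and host are hypothetical:

import os
from tempfile import NamedTemporaryFile

with NamedTemporaryFile(mode='w', prefix='sftp-') as f:
    f.write('put "dist/pkg-1.0.tar.gz"\n')
    f.write('bye\n')
    f.flush()  # ensure sftp sees the commands before it runs
    os.system('sftp -b "%s" "user@example.com:eggs/"' % f.name)

(On Windows the still-open temporary file cannot be reopened by the child process; delete=False plus manual cleanup would be needed there.)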
c2d7f4c6ae9042d1cc7f11fa82d7133e9b506ad7
|
src/main/scripts/data_exports/export_json.py
|
src/main/scripts/data_exports/export_json.py
|
from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w') as outfile:
json.dump(places, outfile)
|
from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w', encoding='utf-8') as outfile:
json.dump(places, outfile, ensure_ascii=False)
|
Fix UTF-8 encoding for json exports
|
Fix UTF-8 encoding for json exports
|
Python
|
apache-2.0
|
dainst/gazetteer,dainst/gazetteer,dainst/gazetteer,dainst/gazetteer,dainst/gazetteer,dainst/gazetteer
|
from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w') as outfile:
json.dump(places, outfile)
Fix UTF-8 encoding for json exports
|
from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w', encoding='utf-8') as outfile:
json.dump(places, outfile, ensure_ascii=False)
|
<commit_before>from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w') as outfile:
json.dump(places, outfile)
<commit_msg>Fix UTF-8 encoding for json exports<commit_after>
|
from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w', encoding='utf-8') as outfile:
json.dump(places, outfile, ensure_ascii=False)
|
from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w') as outfile:
json.dump(places, outfile)
Fix UTF-8 encoding for json exportsfrom lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w', encoding='utf-8') as outfile:
json.dump(places, outfile, ensure_ascii=False)
|
<commit_before>from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w') as outfile:
json.dump(places, outfile)
<commit_msg>Fix UTF-8 encoding for json exports<commit_after>from lib.harvester import Harvester
from lib.cli_helper import is_writable_directory
import argparse
import logging
import json
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s")
parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.")
parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json",
help="specify output file, default: './gazetteer_export.json'")
parser.add_argument('-p', '--polygons', action='store_true',
help="export place shape polygons, this will increase the file size significantly")
if __name__ == "__main__":
options = vars(parser.parse_args())
harvester = Harvester(options['polygons'])
places = harvester.get_data()
with open(options['target'], 'w', encoding='utf-8') as outfile:
json.dump(places, outfile, ensure_ascii=False)
|
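The two keyword arguments above work as a pair: ensure_ascii=False makes json.dump write non-ASCII characters verbatim instead of \uXXXX escapes, and encoding='utf-8' on open() guarantees the file object accepts them. A minimal before/after:

import json

place = {"name": "Köln"}
print(json.dumps(place))                      # {"name": "K\u00f6ln"}
print(json.dumps(place, ensure_ascii=False))  # {"name": "Köln"}

Besides being readable, the unescaped form is also smaller, which matters for a whole-gazetteer export.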
2f3139b2dfa2662daa7e57b221836ff2923c5fc9
|
actstream/admin.py
|
actstream/admin.py
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
Add 'public' field to ActionAdmin list display
|
Add 'public' field to ActionAdmin list display
|
Python
|
mit
|
druss16/danslist,Shanto/django-activity-stream,jimlyndon/django-activity-stream,intelivix/django-activity-stream,pombredanne/django-activity-stream,github-account-because-they-want-it/django-activity-stream,thelabnyc/django-activity-stream,github-account-because-they-want-it/django-activity-stream,pknowles/django-activity-stream,druss16/danslist,justquick/django-activity-stream,pombredanne/django-activity-stream,druss16/danslist,Shanto/django-activity-stream,jimlyndon/django-activity-stream,pknowles/django-activity-stream,jrsupplee/django-activity-stream,jrsupplee/django-activity-stream,intelivix/django-activity-stream,thelabnyc/django-activity-stream,justquick/django-activity-stream
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
Add 'public' field to ActionAdmin list display
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
<commit_before>from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
<commit_msg>Add 'public' field to ActionAdmin list display<commit_after>
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
Add 'public' field to ActionAdmin list displayfrom django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
<commit_before>from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
<commit_msg>Add 'public' field to ActionAdmin list display<commit_after>from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
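list_display entries must name model fields, model methods, or admin callables; this diff implies Action has a boolean public field (the model itself is not shown here), which the changelist renders as the usual on/off icon column. If that assumption holds, the same field could also back a filter, sketched here:

from django.contrib import admin

class ActionAdmin(admin.ModelAdmin):
    date_hierarchy = 'timestamp'
    list_display = ('__str__', 'actor', 'verb', 'target', 'public')
    list_filter = ('timestamp', 'public')  # 'public' assumed to be a BooleanField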
7741968b9d48afc7ac135742774ae911e2611c83
|
tests/test_util.py
|
tests/test_util.py
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
Cover case when seq is uneven
|
Cover case when seq is uneven
|
Python
|
mit
|
CodersOfTheNight/verata
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
Cover case when seq is uneven
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
<commit_before>from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
<commit_msg>Cover case when seq is uneven<commit_after>
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
Cover case when seq is unevenfrom grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
<commit_before>from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
<commit_msg>Cover case when seq is uneven<commit_after>from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
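The padding the new assertion pins down, a final short group filled out with None, is exactly what the classic itertools grouper recipe produces. A Python 3 sketch of that recipe for comparison; grazer's own implementation may differ in detail:

from itertools import zip_longest

def grouper(n, iterable, fillvalue=None):
    # one shared iterator, zipped with itself n times; zip_longest pads
    # the last, shorter group with fillvalue
    args = [iter(iterable)] * n
    return zip_longest(*args, fillvalue=fillvalue)

assert list(grouper(2, range(10)))[-1] == (8, 9)
assert list(grouper(3, range(10)))[-1] == (9, None, None)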
ef060a18216d652df6efa866b6433102262831d8
|
tests/test_util.py
|
tests/test_util.py
|
# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.close_called
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
|
# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
|
Remove close_called test (no longer supported in Python 3)
|
Remove close_called test (no longer supported in Python 3)
|
Python
|
bsd-3-clause
|
justinsalamon/scaper
|
# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.close_called
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
Remove close_called test (no longer supported in Python 3)
|
# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
|
<commit_before># CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.close_called
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
<commit_msg>Remove close_called test (no longer supported in python 3)<commit_after>
|
# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
|
# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.close_called
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
Remove close_called test (no longer supported in Python 3)# CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
|
<commit_before># CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.close_called
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
<commit_msg>Remove close_called test (no longer supported in Python 3)<commit_after># CREATED: 10/15/16 7:52 PM by Justin Salamon <justin.salamon@nyu.edu>
'''
Tests for functions in util.py
'''
from scaper.util import _close_temp_files
from scaper.util import _set_temp_logging_level
import tempfile
import os
import logging
def test_close_temp_files():
'''
Create a bunch of temp files and then make sure they've been closed and
deleted.
'''
tmpfiles = []
with _close_temp_files(tmpfiles):
for _ in range(5):
tmpfiles.append(
tempfile.NamedTemporaryFile(suffix='.wav', delete=True))
for tf in tmpfiles:
assert tf.file.closed
assert not os.path.isfile(tf.name)
def test_set_temp_logging_level():
'''
Ensure temp logging level is set as expected
'''
logger = logging.getLogger()
logger.setLevel('DEBUG')
with _set_temp_logging_level('CRITICAL'):
assert logging.getLevelName(logger.level) == 'CRITICAL'
assert logging.getLevelName(logger.level) == 'DEBUG'
|
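close_called is an attribute of CPython's private temporary-file wrapper, an implementation detail that changed between Python 2 and 3, which is why the assertion had to go. The remaining public-surface checks cover the same outcome, as this standalone sketch shows:

import os
import tempfile

tf = tempfile.NamedTemporaryFile(suffix='.wav', delete=True)
name = tf.name
tf.close()
assert tf.file.closed            # the underlying file object is closed
assert not os.path.isfile(name)  # delete=True removed it from disk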
aa6df5b1ca4801cdaa85f7546c292be4f34e0107
|
test/pyrostest/test_system.py
|
test/pyrostest/test_system.py
|
import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def launches_node(self):
pass
class FailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def no_launch_package(self):
pass
|
import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def test_noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def test_launches_node(self):
pass
class TestFailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def test_no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def test_no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def test_no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def test_no_launch_package(self):
pass
|
Rename tests so that they run.
|
Rename tests so that they run.
|
Python
|
mit
|
gtagency/pyrostest,gtagency/pyrostest
|
import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def launches_node(self):
pass
class FailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def no_launch_package(self):
pass
Rename tests so that they run.
|
import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def test_noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def test_launches_node(self):
pass
class TestFailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def test_no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def test_no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def test_no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def test_no_launch_package(self):
pass
|
<commit_before>import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def launches_node(self):
pass
class FailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def no_launch_package(self):
pass
<commit_msg>Rename tests so that they run.<commit_after>
|
import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def test_noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def test_launches_node(self):
pass
class TestFailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def test_no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def test_no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def test_no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def test_no_launch_package(self):
pass
|
import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def launches_node(self):
pass
class FailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def no_launch_package(self):
pass
Rename tests so that they run.import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def test_noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def test_launches_node(self):
pass
class TestFailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def test_no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def test_no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def test_no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def test_no_launch_package(self):
pass
|
<commit_before>import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def launches_node(self):
pass
class FailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def no_launch_package(self):
pass
<commit_msg>Rename tests so that they run.<commit_after>import pytest
import pyrostest
class TestSpinUp(pyrostest.RosTest):
def test_noop(self):
pass
@pyrostest.launch_node('pyrostest', 'add_one.py')
def test_launches_node(self):
pass
class TestFailureCases(pyrostest.RosTest):
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('this_isnt_a_project', 'add_one.py')
def test_no_rospackage(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.launch_node('pyrostest', 'this_isnt_a_rosnode.py')
def test_no_node(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('pyrostest', 'does_not_exist')
def test_no_launch_file(self):
pass
@pytest.mark.xfail(strict=True)
@pyrostest.with_launch_file('not_a_package', 'exists')
def test_no_launch_package(self):
pass
|
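The renames matter because of pytest's default collection rules: classes matching Test* (with no __init__) are collected, and within them only methods whose names match test_* are executed, so methods like no_node were silently skipped before. A minimal illustration, saved as e.g. test_discovery.py and run with pytest:

class TestDiscovery:
    def noop(self):
        # never collected: the name lacks the test_ prefix
        raise AssertionError("pytest never calls this")

    def test_runs(self):
        # collected and executed under the default python_functions = "test_*"
        assert True

The prefixes are configurable through python_classes and python_functions in pytest.ini; the defaults are what applied here.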
162700c488275b057964f16659f3846c930a0c4f
|
registration/__init__.py
|
registration/__init__.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
Add utility function for retrieving the active registration backend.
|
Add utility function for retrieving the active registration backend.
|
Python
|
bsd-3-clause
|
aptivate/django-registration,rafaduran/django-pluggable-registration,QPmedia/django-registration,QPmedia/django-registration,siddharthsarda/django-registration,maraujop/django-registration,CoatedMoose/django-registration,CoatedMoose/django-registration,newvem/django-registration,christang/django-registration-1.5,thedod/django-registration-hg-mirror,fedenko/django-registration,rbarrois/django-registration,newvem/django-registration,AndrewLvov/django-registration,christang/django-registration-1.5,pelletier/django-registration-81,AndrewLvov/django-registration,rbarrois/django-registration,fedenko/django-registration,aptivate/django-registration
|
Add utility function for retrieving the active registration backend.
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
<commit_before><commit_msg>Add utility function for retrieving the active registration backend.<commit_after>
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
Add utility function for retrieving the active registration backend.from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
<commit_before><commit_msg>Add utility function for retrieving the active registration backend.<commit_after>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
|
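The rfind-and-slice in get_backend is the standard split of a dotted path into module and attribute. A small check of that step in isolation, with its rsplit equivalent (the backend path is only an example string):

path = 'registration.backends.default.DefaultBackend'

# split as the module does it, on the last dot
i = path.rfind('.')
module, attr = path[:i], path[i + 1:]

assert (module, attr) == tuple(path.rsplit('.', 1))  # same result
assert module == 'registration.backends.default'
assert attr == 'DefaultBackend'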
ac7477803739d303df8374f916748173da32cb07
|
test_elasticsearch/test_server/__init__.py
|
test_elasticsearch/test_server/__init__.py
|
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client():
global client
if client is not None:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
client = local_get_client()
except ImportError:
# fallback to using vanilla client
client = get_test_client()
return client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client():
return get_client()
|
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client(**kwargs):
global client
if client is not None and not kwargs:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
new_client = local_get_client(**kwargs)
except ImportError:
# fallback to using vanilla client
new_client = get_test_client(**kwargs)
if not kwargs:
client = new_client
return new_client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client(**kwargs):
return get_client(**kwargs)
|
Allow test client to be created with kwargs
|
Allow test client to be created with kwargs
|
Python
|
apache-2.0
|
brunobell/elasticsearch-py,elastic/elasticsearch-py,brunobell/elasticsearch-py,elastic/elasticsearch-py
|
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client():
global client
if client is not None:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
client = local_get_client()
except ImportError:
# fallback to using vanilla client
client = get_test_client()
return client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client():
return get_client()
Allow test client to be created with kwargs
|
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client(**kwargs):
global client
if client is not None and not kwargs:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
new_client = local_get_client(**kwargs)
except ImportError:
# fallback to using vanilla client
new_client = get_test_client(**kwargs)
if not kwargs:
client = new_client
return new_client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client(**kwargs):
return get_client(**kwargs)
|
<commit_before>from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client():
global client
if client is not None:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
client = local_get_client()
except ImportError:
# fallback to using vanilla client
client = get_test_client()
return client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client():
return get_client()
<commit_msg>Allow test client to be created with kwargs<commit_after>
|
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client(**kwargs):
global client
if client is not None and not kwargs:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
new_client = local_get_client(**kwargs)
except ImportError:
# fallback to using vanilla client
new_client = get_test_client(**kwargs)
if not kwargs:
client = new_client
return new_client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client(**kwargs):
return get_client(**kwargs)
|
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client():
global client
if client is not None:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
client = local_get_client()
except ImportError:
# fallback to using vanilla client
client = get_test_client()
return client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client():
return get_client()
Allow test client to be created with kwargsfrom elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client(**kwargs):
global client
if client is not None and not kwargs:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
new_client = local_get_client(**kwargs)
except ImportError:
# fallback to using vanilla client
new_client = get_test_client(**kwargs)
if not kwargs:
client = new_client
return new_client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client(**kwargs):
return get_client(**kwargs)
|
<commit_before>from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client():
global client
if client is not None:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
client = local_get_client()
except ImportError:
# fallback to using vanilla client
client = get_test_client()
return client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client():
return get_client()
<commit_msg>Allow test client to be created with kwargs<commit_after>from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase
client = None
def get_client(**kwargs):
global client
if client is not None and not kwargs:
return client
# try and locate manual override in the local environment
try:
from test_elasticsearch.local import get_client as local_get_client
new_client = local_get_client(**kwargs)
except ImportError:
# fallback to using vanilla client
new_client = get_test_client(**kwargs)
if not kwargs:
client = new_client
return new_client
def setup():
get_client()
class ElasticsearchTestCase(BaseTestCase):
@staticmethod
def _get_client(**kwargs):
return get_client(**kwargs)
|
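Distilled, the caching rule introduced above is: memoize only the zero-argument call, and build a fresh, uncached client whenever keyword arguments are supplied. A standalone sketch with a stand-in factory; make_client is illustrative, not the real test helper:

_client = None

def make_client(**kwargs):
    return dict(kwargs)  # stand-in for constructing an Elasticsearch client

def get_client(**kwargs):
    global _client
    if _client is not None and not kwargs:
        return _client               # reuse the cached default client
    new_client = make_client(**kwargs)
    if not kwargs:
        _client = new_client         # cache only the default configuration
    return new_client

assert get_client() is get_client()                # memoized
assert get_client(timeout=30) is not get_client()  # per-kwargs clients stay fresh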
6d5edb8a5eacfb2dc83a2eef5732562024995942
|
api/serializers.py
|
api/serializers.py
|
from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
if 'is_school' in data and data['is_school']:
error_dict = {}
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
|
from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
error_dict = {}
if 'is_school' in data and data['is_school']:
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
|
Fix bug with registering non-school teams
|
Fix bug with registering non-school teams
|
Python
|
bsd-3-clause
|
stefantsov/blackbox3,stefantsov/blackbox3,stefantsov/blackbox3
|
from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
if 'is_school' in data and data['is_school']:
error_dict = {}
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
Fix bug with registering non-school teams
|
from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
error_dict = {}
if 'is_school' in data and data['is_school']:
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
|
<commit_before>from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
if 'is_school' in data and data['is_school']:
error_dict = {}
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
<commit_msg>Fix bug with registering non-school teams<commit_after>
|
from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
error_dict = {}
if 'is_school' in data and data['is_school']:
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
|
from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
if 'is_school' in data and data['is_school']:
error_dict = {}
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
Fix bug with registering non-school teamsfrom django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
error_dict = {}
if 'is_school' in data and data['is_school']:
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
|
<commit_before>from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
if 'is_school' in data and data['is_school']:
error_dict = {}
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
<commit_msg>Fix bug with registering non-school teams<commit_after>from django.utils.translation import ugettext as _
from rest_framework.serializers import ModelSerializer, ValidationError
from reg.models import Team
class TeamSerializer(ModelSerializer):
def validate(self, data):
error_dict = {}
if 'is_school' in data and data['is_school']:
if 'school_name' not in data or not data['school_name'].strip():
error_dict['school_name'] = [_('The field is required for school teams')]
if 'teacher_name' not in data or not data['teacher_name'].strip():
error_dict['teacher_name'] = [_('The field is required for school teams')]
if 'teacher_email' not in data or not data['teacher_email'].strip():
error_dict['teacher_email'] = [_('The field is required for school teams')]
if 'address' not in data or not data['address'].strip():
error_dict['address'] = [_('The field is required for school teams')]
if len(error_dict) > 0:
raise ValidationError(error_dict)
return data
class Meta:
model = Team
exclude = ('auth_string',)
read_only_fields = ('id', 'created_at')
|
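The four near-identical branches can also be collapsed into a loop over the required field names without changing which errors are reported. A standalone sketch, with plain strings in place of ugettext so it runs without Django settings:

REQUIRED_FOR_SCHOOLS = ('school_name', 'teacher_name', 'teacher_email', 'address')
MESSAGE = 'The field is required for school teams'

def school_errors(data):
    # collect every missing or blank field, not just the first one found
    errors = {}
    if data.get('is_school'):
        for field in REQUIRED_FOR_SCHOOLS:
            if not (data.get(field) or '').strip():
                errors[field] = [MESSAGE]
    return errors

assert set(school_errors({'is_school': True})) == set(REQUIRED_FOR_SCHOOLS)
assert school_errors({'is_school': False}) == {}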
91049834c3f30dcb838ac45167e93aa1bc92a913
|
fluent_faq/pagetypes/faqpage/page_type_plugins.py
|
fluent_faq/pagetypes/faqpage/page_type_plugins.py
|
from fluent_pages.admin import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
|
from fluent_pages.adminui import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
|
Prepare Django 1.7 compatibility, use new fluent_pages.adminui module
|
Prepare Django 1.7 compatibility, use new fluent_pages.adminui module
|
Python
|
apache-2.0
|
edoburu/django-fluent-faq,edoburu/django-fluent-faq
|
from fluent_pages.admin import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
Prepare Django 1.7 compatibility, use new fluent_pages.adminui module
|
from fluent_pages.adminui import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
|
<commit_before>from fluent_pages.admin import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
<commit_msg>Prepare Django 1.7 compatibility, use new fluent_pages.adminui module<commit_after>
|
from fluent_pages.adminui import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
|
from fluent_pages.admin import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
Prepare Django 1.7 compatibility, use new fluent_pages.adminui modulefrom fluent_pages.adminui import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
|
<commit_before>from fluent_pages.admin import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
<commit_msg>Prepare Django 1.7 compatibility, use new fluent_pages.adminui module<commit_after>from fluent_pages.adminui import HtmlPageAdmin
from fluent_pages.extensions import page_type_pool, PageTypePlugin
from .models import FaqPage
@page_type_pool.register
class FaqPagePlugin(PageTypePlugin):
"""
Plugin binding the FaqPage model as pagetype.
"""
model = FaqPage
model_admin = HtmlPageAdmin
urls = 'fluent_faq.urls'
|
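The commit moves to the new fluent_pages.adminui location outright; code that must straddle both fluent-pages layouts during such a transition could instead fall back at import time, along the lines of this sketch:

try:
    from fluent_pages.adminui import HtmlPageAdmin  # newer module layout
except ImportError:
    from fluent_pages.admin import HtmlPageAdmin    # older fluent-pages releases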
b3d0f710de7982877fb2c30c46c75de86262caf4
|
grako/rendering.py
|
grako/rendering.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(self.template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
if template is None:
template = self.template
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
Allow render to take a template different from the default one.
|
Allow render to take a template different from the default one.
|
Python
|
bsd-2-clause
|
swayf/grako,swayf/grako
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(self.template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
Allow render to take a template different from the default one.
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
if template is None:
template = self.template
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(self.template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
<commit_msg>Allow render to take a template different from the default one.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
if template is None:
template = self.template
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(self.template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
Allow render to take a template different from the default one.# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
if template is None:
template = self.template
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, **fields):
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(self.template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
<commit_msg>Allow render to take a template different from the default one.<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
""" Render the given item
"""
if item is None:
return ''
elif isinstance(item, Renderer):
return item.render(**fields)
elif isinstance(item, list):
return ''.join(render(e) for e in item)
else:
return str(item)
class Renderer(object):
template = ''
_counter = itertools.count()
def __init__(self, template=None):
if template is not None:
self.template = template
def counter(self):
return next(self._counter)
def render_fields(self, fields):
pass
def render(self, template=None, **fields):
if template is None:
template = self.template
fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
self.render_fields(fields)
fields = {k:render(v) for k, v in fields.items()}
try:
return trim(template).format(**fields)
except KeyError as e:
raise KeyError(str(e), type(self))
|
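A minimal usage sketch of the per-call template override introduced in this record. The Greeting subclass is hypothetical, and trim is stubbed as a plain strip to stand in for the .util helper:

# Hypothetical subclass exercising the new optional `template` argument.
def trim(text):
    return text.strip()  # stub; the real trim lives in .util

class Greeting(Renderer):
    template = 'Hello, {name}!'

g = Greeting()
print(g.render(name='Ada'))                           # class template -> Hello, Ada!
print(g.render(template='Bye, {name}.', name='Ada'))  # per-call override -> Bye, Ada.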
1690959502e2951920e52a0832e6571144bab6a8
|
_lib/wordpress_faq_processor.py
|
_lib/wordpress_faq_processor.py
|
import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return post
|
import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return {'_index': 'content',
'_type': 'faq',
'_id': post['slug'],
'_source': post}
|
Change faq processor to bulk index
|
Change faq processor to bulk index
|
Python
|
cc0-1.0
|
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
|
import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return post
Change faq processor to bulk index
|
import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return {'_index': 'content',
'_type': 'faq',
'_id': post['slug'],
'_source': post}
|
<commit_before>import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return post
<commit_msg>Change faq processor to bulk index<commit_after>
|
import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return {'_index': 'content',
'_type': 'faq',
'_id': post['slug'],
'_source': post}
|
import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return post
Change faq processor to bulk indeximport sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return {'_index': 'content',
'_type': 'faq',
'_id': post['slug'],
'_source': post}
|
<commit_before>import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return post
<commit_msg>Change faq processor to bulk index<commit_after>import sys
import json
import os.path
import requests
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
url = os.path.expandvars(url)
resp = requests.get(url, params={'page': current_page, 'count': '-1'})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
post['_id'] = post['slug']
names = ['og_title', 'og_image', 'og_desc', 'twtr_text', 'twtr_lang',
'twtr_rel', 'twtr_hash', 'utm_campaign', 'utm_term',
'utm_content', 'faq']
for name in names:
if name in post['custom_fields']:
post[name] = post['custom_fields'][name]
if 'taxonomy_fj_tag' in post:
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
del post['custom_fields']
return {'_index': 'content',
'_type': 'faq',
'_id': post['slug'],
'_source': post}
|
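The reshaped return value turns each post into a ready-made Elasticsearch bulk action, so the documents() generator can feed the elasticsearch-py bulk helper directly. A minimal sketch, assuming a local cluster; both URLs are illustrative, not taken from this record:

from elasticsearch import Elasticsearch, helpers

es = Elasticsearch(['http://localhost:9200'])
# Each yielded dict already carries _index, _type, _id and _source.
actions = documents('faq', 'http://example.com/api/get_posts/')
success, _ = helpers.bulk(es, actions)
print('indexed %d faq documents' % success)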
c4e497f24818169e8c59c07246582223c8214e45
|
bitfield/forms.py
|
bitfield/forms.py
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
Allow values of BitFormFields to be integers (for legacy compatibility in some apps)
|
Allow values of BitFormFields to be integers (for legacy compatibility in some apps)
|
Python
|
apache-2.0
|
moggers87/django-bitfield,joshowen/django-bitfield,Elec/django-bitfield,budlight/django-bitfield,disqus/django-bitfield
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
Allow values of BitFormFields to be integers (for legacy compatibility in some apps)
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
<commit_before>from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
<commit_msg>Allow values of BitFormFields to be integers (for legacy compatibility in some apps)<commit_after>
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
Allow values of BitFormFields to be integers (for legacy compatibility in some apps)from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
<commit_before>from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
<commit_msg>Allow values of BitFormFields to be integers (for legacy compatibility in some apps)<commit_after>from django.forms import CheckboxSelectMultiple, IntegerField, ValidationError
from django.utils.encoding import force_unicode
from .types import BitHandler
class BitFieldCheckboxSelectMultiple(CheckboxSelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if isinstance(value, BitHandler):
value = [k for k, v in value if v]
return super(BitFieldCheckboxSelectMultiple, self).render(
name, value, attrs=attrs, choices=enumerate(choices))
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if initial != data:
return True
initial_set = set([force_unicode(value) for value in initial])
data_set = set([force_unicode(value) for value in data])
return data_set != initial_set
class BitFormField(IntegerField):
"""
'choices' should be a flat list of flags (just as BitField
accepts them).
"""
def __init__(self, choices=(), widget=BitFieldCheckboxSelectMultiple, *args, **kwargs):
self.widget = widget
self.choices = self.widget.choices = choices
super(BitFormField, self).__init__(*args, **kwargs)
def clean(self, value):
if not value:
return 0
if isinstance(value, int):
result = BitHandler(value, [k for k, v in self.choices])
else:
result = BitHandler(0, [k for k, v in self.choices])
for k in value:
try:
setattr(result, str(k), True)
except AttributeError:
raise ValidationError('Unknown choice')
return int(result)
|
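A sketch of the two paths through the new clean(), assuming flag choices given as (name, label) pairs and that the flag-setting loop applies only to iterable input; the choices below are made up for illustration:

# Hypothetical flags, not taken from any app in this record.
field = BitFormField(choices=(('read', 'Read'), ('write', 'Write')))

field.clean(['read'])  # iterable of flag names, as before   -> 1
field.clean(3)         # raw integer mask now accepted as-is -> 3
field.clean(None)      # falsy input still short-circuits    -> 0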
19da6c14a5063d3d0361b9b887fd0e4ed8d7a83d
|
nflpool/data/seasoninfo.py
|
nflpool/data/seasoninfo.py
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
Update SeasonInfo database table info
|
Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the season
|
Python
|
mit
|
prcutler/nflpool,prcutler/nflpool
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the season
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
<commit_before>from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
<commit_msg>Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the season<commit_after>
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the seasonfrom nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
<commit_before>from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
<commit_msg>Update SeasonInfo database table info
Add columns for the first game start time, home and away teams for the
first NFL game played of the season<commit_after>from nflpool.data.modelbase import SqlAlchemyBase
import sqlalchemy
class SeasonInfo(SqlAlchemyBase):
__tablename__ = 'SeasonInfo'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
current_season = sqlalchemy.Column(sqlalchemy.Integer)
season_start_date = sqlalchemy.Column(sqlalchemy.Integer)
season_start_time = sqlalchemy.Column(sqlalchemy.String)
home_team = sqlalchemy.Column(sqlalchemy.String)
away_team = sqlalchemy.Column(sqlalchemy.String)
|
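A hypothetical insert exercising the three new columns, assuming SqlAlchemyBase is a declarative base and the table already exists; the engine URL and the values are illustrative only:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite:///nflpool.sqlite')  # illustrative path
session = sessionmaker(bind=engine)()

session.add(SeasonInfo(current_season=2017,
                       season_start_date=20170907,
                       season_start_time='8:30 PM ET',
                       home_team='New England Patriots',
                       away_team='Kansas City Chiefs'))
session.commit()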
0428522c8df724ce49a32686676b2c5345abfda9
|
sdklib/util/timetizer.py
|
sdklib/util/timetizer.py
|
import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf():
t = datetime.date.today()
return t.strftime("%d/%m/%Y")
def tomorrow_strf():
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def yesterday_strf():
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
|
import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf(format="%d/%m/%Y"):
t = datetime.date.today()
return t.strftime(format)
def tomorrow_strf(format="%d/%m/%Y"):
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime(format)
def yesterday_strf(format="%d/%m/%Y"):
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime(format)
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
|
Add format parameter to strf functions
|
Add format parameter to strf functions
|
Python
|
bsd-2-clause
|
ivanprjcts/sdklib,ivanprjcts/sdklib
|
import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf():
t = datetime.date.today()
return t.strftime("%d/%m/%Y")
def tomorrow_strf():
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def yesterday_strf():
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
Add format parameter to strf functions
|
import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf(format="%d/%m/%Y"):
t = datetime.date.today()
return t.strftime(format)
def tomorrow_strf(format="%d/%m/%Y"):
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime(format)
def yesterday_strf(format="%d/%m/%Y"):
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime(format)
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
|
<commit_before>import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf():
t = datetime.date.today()
return t.strftime("%d/%m/%Y")
def tomorrow_strf():
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def yesterday_strf():
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
<commit_msg>Add format parameter to strf functions<commit_after>
|
import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf(format="%d/%m/%Y"):
t = datetime.date.today()
return t.strftime(format)
def tomorrow_strf(format="%d/%m/%Y"):
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime(format)
def yesterday_strf(format="%d/%m/%Y"):
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime(format)
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
|
import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf():
t = datetime.date.today()
return t.strftime("%d/%m/%Y")
def tomorrow_strf():
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def yesterday_strf():
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
Add format parameter to strf functionsimport time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf(format="%d/%m/%Y"):
t = datetime.date.today()
return t.strftime(format)
def tomorrow_strf(format="%d/%m/%Y"):
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime(format)
def yesterday_strf(format="%d/%m/%Y"):
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime(format)
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
|
<commit_before>import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf():
t = datetime.date.today()
return t.strftime("%d/%m/%Y")
def tomorrow_strf():
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def yesterday_strf():
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime("%d/%m/%Y")
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
<commit_msg>Add format parameter to strf functions<commit_after>import time
import datetime
def get_current_utc(time_format="%Y-%m-%d %H:%M:%S"):
"""
@return a string representation of the current time in UTC.
"""
return time.strftime(time_format, time.gmtime())
def today_strf(format="%d/%m/%Y"):
t = datetime.date.today()
return t.strftime(format)
def tomorrow_strf(format="%d/%m/%Y"):
t = datetime.date.today() + datetime.timedelta(days=1)
return t.strftime(format)
def yesterday_strf(format="%d/%m/%Y"):
t = datetime.date.today() - datetime.timedelta(days=1)
return t.strftime(format)
def seconds_to_milliseconds_timestamp(seconds_timestamp):
return int(round(seconds_timestamp * 1000))
def current_milliseconds_timestamp():
return seconds_to_milliseconds_timestamp(time.time())
def datetime_to_milliseconds_timestamp(datetime_obj):
seconds_timestamp = time.mktime(datetime_obj.timetuple())
return seconds_to_milliseconds_timestamp(seconds_timestamp)
|
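The default keeps the old dd/mm/YYYY behaviour while callers can now pass any strftime pattern; note that the parameter shadows the built-in format, which is harmless here, though a name like fmt would avoid the collision. A few illustrative calls:

today_strf()                # e.g. '25/12/2023' (unchanged default)
today_strf('%Y-%m-%d')      # e.g. '2023-12-25'
tomorrow_strf('%A')         # e.g. 'Tuesday'
yesterday_strf('%d %b %Y')  # e.g. '24 Dec 2023'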
22751313f6e221c009aeb0673e531894d1645c41
|
examples/settings.py
|
examples/settings.py
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
Use 1080p as default window size
|
Use 1080p as default window size
|
Python
|
isc
|
Contraz/demosys-py
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
Use 1080p as default window size
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
<commit_before>import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
<commit_msg>Use 1080p as default window size<commit_after>
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
Use 1080p as default window sizeimport os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
<commit_before>import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
<commit_msg>Use 1080p as default window size<commit_after>import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
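A quick, purely illustrative sanity check that the new 1920x1080 default still agrees with the configured aspect ratio, just as the old 1280x720 did:

width, height = WINDOW['size']
assert width / height == WINDOW['aspect_ratio']  # 1920 / 1080 == 16 / 9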
c04872d00a26e9bf0f48eeacb360b37ce0fba01e
|
semantic_release/pypi.py
|
semantic_release/pypi.py
|
"""PyPI
"""
from invoke import run
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
dists=['dist/*'],
sign=False,
identity=None,
username=username,
password=password,
comment=None,
sign_with='gpg',
config_file='~/.pypirc',
skip_existing=skip_existing,
cert=None,
client_cert=None,
repository_url=None
)
run('rm -rf build dist')
|
"""PyPI
"""
from invoke import run
from twine import settings
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
settings.Settings(
username=username,
password=password,
skip_existing=skip_existing,
),
['dist/*'],
)
run('rm -rf build dist')
|
Use new interface for twine
|
fix: Use new interface for twine
|
Python
|
mit
|
relekang/python-semantic-release,relekang/python-semantic-release
|
"""PyPI
"""
from invoke import run
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
dists=['dist/*'],
sign=False,
identity=None,
username=username,
password=password,
comment=None,
sign_with='gpg',
config_file='~/.pypirc',
skip_existing=skip_existing,
cert=None,
client_cert=None,
repository_url=None
)
run('rm -rf build dist')
fix: Use new interface for twine
|
"""PyPI
"""
from invoke import run
from twine import settings
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
settings.Settings(
username=username,
password=password,
skip_existing=skip_existing,
),
['dist/*'],
)
run('rm -rf build dist')
|
<commit_before>"""PyPI
"""
from invoke import run
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
dists=['dist/*'],
sign=False,
identity=None,
username=username,
password=password,
comment=None,
sign_with='gpg',
config_file='~/.pypirc',
skip_existing=skip_existing,
cert=None,
client_cert=None,
repository_url=None
)
run('rm -rf build dist')
<commit_msg>fix: Use new interface for twine<commit_after>
|
"""PyPI
"""
from invoke import run
from twine import settings
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
settings.Settings(
username=username,
password=password,
skip_existing=skip_existing,
),
['dist/*'],
)
run('rm -rf build dist')
|
"""PyPI
"""
from invoke import run
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
dists=['dist/*'],
sign=False,
identity=None,
username=username,
password=password,
comment=None,
sign_with='gpg',
config_file='~/.pypirc',
skip_existing=skip_existing,
cert=None,
client_cert=None,
repository_url=None
)
run('rm -rf build dist')
fix: Use new interface for twine"""PyPI
"""
from invoke import run
from twine import settings
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
settings.Settings(
username=username,
password=password,
skip_existing=skip_existing,
),
['dist/*'],
)
run('rm -rf build dist')
|
<commit_before>"""PyPI
"""
from invoke import run
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
dists=['dist/*'],
sign=False,
identity=None,
username=username,
password=password,
comment=None,
sign_with='gpg',
config_file='~/.pypirc',
skip_existing=skip_existing,
cert=None,
client_cert=None,
repository_url=None
)
run('rm -rf build dist')
<commit_msg>fix: Use new interface for twine<commit_after>"""PyPI
"""
from invoke import run
from twine import settings
from twine.commands import upload as twine_upload
def upload_to_pypi(
dists: str = 'sdist bdist_wheel',
username: str = None,
password: str = None,
skip_existing: bool = False
):
"""Creates the wheel and uploads to pypi with twine.
:param dists: The dists string passed to setup.py. Default: 'bdist_wheel'
:param username: PyPI account username string
:param password: PyPI account password string
:param skip_existing: Continue uploading files if one already exists. (Only valid when
uploading to PyPI. Other implementations may not support this.)
"""
run('python setup.py {}'.format(dists))
twine_upload.upload(
settings.Settings(
username=username,
password=password,
skip_existing=skip_existing,
),
['dist/*'],
)
run('rm -rf build dist')
|
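A minimal sketch of the new twine call shape, mirroring the code above; the credentials are placeholders, not real values:

from twine import settings
from twine.commands import upload as twine_upload

upload_settings = settings.Settings(
    username='__token__',         # placeholder credentials
    password='pypi-<api-token>',
    skip_existing=True,
)
twine_upload.upload(upload_settings, ['dist/*'])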
fd0dad58403f34338b85edd83641e65a68779705
|
casslist/views.py
|
casslist/views.py
|
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
def get_queryset(self):
return models.Sound.objects.order_by('-id')
|
from django.db import OperationalError
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
try:
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
except OperationalError: # The database is empty.
total_plays = 0
def get_queryset(self):
return models.Sound.objects.order_by('-id')
|
Fix error when making first migrations in a new project
|
[casslist] Fix error when making first migrations in a new project
|
Python
|
mit
|
joshuaprince/Cassoundra,joshuaprince/Cassoundra,joshuaprince/Cassoundra
|
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
def get_queryset(self):
return models.Sound.objects.order_by('-id')
[casslist] Fix error when making first migrations in a new project
|
from django.db import OperationalError
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
try:
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
except OperationalError: # The database is empty.
total_plays = 0
def get_queryset(self):
return models.Sound.objects.order_by('-id')
|
<commit_before>from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
def get_queryset(self):
return models.Sound.objects.order_by('-id')
<commit_msg>[casslist] Fix error when making first migrations in a new project<commit_after>
|
from django.db import OperationalError
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
try:
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
except OperationalError: # The database is empty.
total_plays = 0
def get_queryset(self):
return models.Sound.objects.order_by('-id')
|
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
def get_queryset(self):
return models.Sound.objects.order_by('-id')
[casslist] Fix error when making first migrations in a new projectfrom django.db import OperationalError
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
try:
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
except OperationalError: # The database is empty.
total_plays = 0
def get_queryset(self):
return models.Sound.objects.order_by('-id')
|
<commit_before>from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
def get_queryset(self):
return models.Sound.objects.order_by('-id')
<commit_msg>[casslist] Fix error when making first migrations in a new project<commit_after>from django.db import OperationalError
from django.views import generic
from django.db.models import Sum
from cassupload import models
class CassListView(generic.ListView):
template_name = 'casslist/index.html'
context_object_name = 'cass_sound_list'
try:
total_plays = models.Sound.objects.all().aggregate(Sum('play_count'))['play_count__sum']
except OperationalError: # The database is empty.
total_plays = 0
def get_queryset(self):
return models.Sound.objects.order_by('-id')
|
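Aside on the pattern above: the aggregate sits in the class body, so it executes at import time, which is why a bare manage.py migrate against a fresh database trips OperationalError. A minimal sketch of the lazier alternative, reusing the record's own model names; get_context_data is standard Django, everything else is unchanged from the record:

from django.db.models import Sum
from django.views import generic

from cassupload import models


class CassListView(generic.ListView):
    template_name = 'casslist/index.html'
    context_object_name = 'cass_sound_list'

    def get_queryset(self):
        return models.Sound.objects.order_by('-id')

    def get_context_data(self, **kwargs):
        # Runs per request, after migrations have created the tables,
        # so no OperationalError guard is needed here.
        context = super().get_context_data(**kwargs)
        context['total_plays'] = (
            models.Sound.objects.aggregate(Sum('play_count'))['play_count__sum'] or 0
        )
        return context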
2f4b57b2b7c5b391af615a204ad85dd04cc780d3
|
chatroom/views.py
|
chatroom/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
return render(request, 'order.html')
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from login.views import isLogin
from login import auth
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
if isLogin(request):
data = auth.get_user_data(request)
if auth.hasProfile(data.uuid):
profile = auth.get_user_profile(request)
return render(request, "order.html", {'realname' : profile.real_name,
'email' : profile.email,
'shipping_address' : profile.default_shipping_address,
'phone' : profile.phone_number})
else:
redirect("/profile/")
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
|
Load profiles on the order page
|
Load profiles on the order page
|
Python
|
mit
|
sonicyang/chiphub,sonicyang/chiphub,sonicyang/chiphub
|
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
return render(request, 'order.html')
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
Load profiles on the order page
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from login.views import isLogin
from login import auth
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
if isLogin(request):
data = auth.get_user_data(request)
if auth.hasProfile(data.uuid):
profile = auth.get_user_profile(request)
return render(request, "order.html", {'realname' : profile.real_name,
'email' : profile.email,
'shipping_address' : profile.default_shipping_address,
'phone' : profile.phone_number})
else:
redirect("/profile/")
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
return render(request, 'order.html')
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
<commit_msg>Load profiles on the order page<commit_after>
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from login.views import isLogin
from login import auth
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
if isLogin(request):
data = auth.get_user_data(request)
if auth.hasProfile(data.uuid):
profile = auth.get_user_profile(request)
return render(request, "order.html", {'realname' : profile.real_name,
'email' : profile.email,
'shipping_address' : profile.default_shipping_address,
'phone' : profile.phone_number})
else:
redirect("/profile/")
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
|
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
return render(request, 'order.html')
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
Load profiles on the order pagefrom django.shortcuts import render, redirect
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from login.views import isLogin
from login import auth
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
if isLogin(request):
data = auth.get_user_data(request)
if auth.hasProfile(data.uuid):
profile = auth.get_user_profile(request)
return render(request, "order.html", {'realname' : profile.real_name,
'email' : profile.email,
'shipping_address' : profile.default_shipping_address,
'phone' : profile.phone_number})
else:
redirect("/profile/")
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
return render(request, 'order.html')
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
<commit_msg>Load profiles on the order page<commit_after>from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from login.views import isLogin
from login import auth
def index(request):
return render(request, 'index.html')
def append(request):
# open("data", "a").write(str(request.args.get("msg")) + "\n\r")
open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
return HttpResponse("")
def retreive(request):
fil = open("/tmp/data", "rb")
payload = fil.read()
return HttpResponse(payload)
def order(request):
if isLogin(request):
data = auth.get_user_data(request)
if auth.hasProfile(data.uuid):
profile = auth.get_user_profile(request)
return render(request, "order.html", {'realname' : profile.real_name,
'email' : profile.email,
'shipping_address' : profile.default_shipping_address,
'phone' : profile.phone_number})
else:
redirect("/profile/")
def faq(request):
return render(request, 'faq.html')
def about_us(request):
return render(request, 'about_us.html')
def progress(request):
return render(request, 'progress.html')
def exchange(request):
return render(request, 'exchange.html')
def chatroom(request):
return render(request, 'chatroom.html')
|
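One caveat worth noting about the order view above: when isLogin(request) is false the function falls through without returning, and on the no-profile branch the redirect is built but not returned, so Django complains that the view returned None. A hedged sketch with every branch returning explicitly — isLogin and auth are the record's own helpers, the '/login/' route is an assumption for illustration:

from django.shortcuts import redirect, render

from login import auth
from login.views import isLogin


def order(request):
    if not isLogin(request):
        return redirect('/login/')  # hypothetical login route, not from the record
    data = auth.get_user_data(request)
    if not auth.hasProfile(data.uuid):
        return redirect('/profile/')  # note the explicit return
    profile = auth.get_user_profile(request)
    return render(request, 'order.html', {
        'realname': profile.real_name,
        'email': profile.email,
        'shipping_address': profile.default_shipping_address,
        'phone': profile.phone_number,
    })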
070b02c17e423e446562828af3ef69d06667472b
|
server/users/schema/queries.py
|
server/users/schema/queries.py
|
from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
try:
token_user_id = get_token_user_id(args, context)
user = get_user_model().objects.get(id=token_user_id)
print(user)
return Viewer(
id=0,
user=user
)
except BaseException:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
|
from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
users = get_user_model()
try:
token_user_id = get_token_user_id(args, context)
user = users.objects.get(id=token_user_id)
return Viewer(
id=0,
user=user
)
except users.DoesNotExist:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
|
Add better exception to viewer resolver
|
Add better exception to viewer resolver
|
Python
|
mit
|
ncrmro/reango,ncrmro/reango,ncrmro/ango,ncrmro/reango,ncrmro/ango,ncrmro/ango
|
from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
try:
token_user_id = get_token_user_id(args, context)
user = get_user_model().objects.get(id=token_user_id)
print(user)
return Viewer(
id=0,
user=user
)
except BaseException:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
Add better exception to viewer resolver
|
from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
users = get_user_model()
try:
token_user_id = get_token_user_id(args, context)
user = users.objects.get(id=token_user_id)
return Viewer(
id=0,
user=user
)
except users.DoesNotExist:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
|
<commit_before>from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
try:
token_user_id = get_token_user_id(args, context)
user = get_user_model().objects.get(id=token_user_id)
print(user)
return Viewer(
id=0,
user=user
)
except BaseException:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
<commit_msg>Add better exception to viewer resolver<commit_after>
|
from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
users = get_user_model()
try:
token_user_id = get_token_user_id(args, context)
user = users.objects.get(id=token_user_id)
return Viewer(
id=0,
user=user
)
except users.DoesNotExist:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
|
from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
try:
token_user_id = get_token_user_id(args, context)
user = get_user_model().objects.get(id=token_user_id)
print(user)
return Viewer(
id=0,
user=user
)
except BaseException:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
Add better exception to viewer resolverfrom django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
users = get_user_model()
try:
token_user_id = get_token_user_id(args, context)
user = users.objects.get(id=token_user_id)
return Viewer(
id=0,
user=user
)
except users.DoesNotExist:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
|
<commit_before>from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
try:
token_user_id = get_token_user_id(args, context)
user = get_user_model().objects.get(id=token_user_id)
print(user)
return Viewer(
id=0,
user=user
)
except BaseException:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
<commit_msg>Add better exception to viewer resolver<commit_after>from django.contrib.auth import get_user_model
from graphene import AbstractType, Field, String
from users.jwt_util import get_token_user_id
from .definitions import Viewer
class UserQueries(AbstractType):
viewer = Field(Viewer)
@staticmethod
def resolve_viewer(self, args, context, info):
users = get_user_model()
try:
token_user_id = get_token_user_id(args, context)
user = users.objects.get(id=token_user_id)
return Viewer(
id=0,
user=user
)
except users.DoesNotExist:
return Viewer(
id=0,
user=get_user_model()(
id=0,
email=""
)
)
|
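The narrowing from BaseException to users.DoesNotExist above is the interesting part: the blanket except also swallowed token-decoding failures and database outages. A small sketch of the same idea factored into a helper, using the record's names; the anonymous-user construction mirrors the resolver's fallback:

from django.contrib.auth import get_user_model
from users.jwt_util import get_token_user_id


def viewer_user(args, context):
    users = get_user_model()
    try:
        return users.objects.get(id=get_token_user_id(args, context))
    except users.DoesNotExist:
        # Unknown id -> anonymous viewer; anything else (bad JWT, DB down)
        # now propagates instead of being silently masked.
        return users(id=0, email="")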
e696fa2d398eb331cd5e25b2085b9d5c1e892aa1
|
server/lib/python/cartodb_services/test/test_mapboxtrueisoline.py
|
server/lib/python/cartodb_services/test/test_mapboxtrueisoline.py
|
import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
|
import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_invalid_time_range(self):
time_ranges = [4000]
with self.assertRaises(ValueError):
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
|
Add test to validate time ranges
|
Add test to validate time ranges
|
Python
|
bsd-3-clause
|
CartoDB/geocoder-api,CartoDB/dataservices-api,CartoDB/dataservices-api,CartoDB/geocoder-api,CartoDB/geocoder-api,CartoDB/dataservices-api,CartoDB/geocoder-api,CartoDB/dataservices-api
|
import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
Add test to validate time ranges
|
import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_invalid_time_range(self):
time_ranges = [4000]
with self.assertRaises(ValueError):
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
|
<commit_before>import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
<commit_msg>Add test to validate time ranges<commit_after>
|
import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_invalid_time_range(self):
time_ranges = [4000]
with self.assertRaises(ValueError):
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
|
import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
Add test to validate time rangesimport unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_invalid_time_range(self):
time_ranges = [4000]
with self.assertRaises(ValueError):
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
|
<commit_before>import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
<commit_msg>Add test to validate time ranges<commit_after>import unittest
from mock import Mock
from cartodb_services.mapbox.true_isolines import MapboxTrueIsolines, DEFAULT_PROFILE
from cartodb_services.tools import Coordinate
from credentials import mapbox_api_key
VALID_ORIGIN = Coordinate(-73.989, 40.733)
class MapboxTrueIsolinesTestCase(unittest.TestCase):
def setUp(self):
self.mapbox_isolines = MapboxTrueIsolines(apikey=mapbox_api_key(),
logger=Mock())
def test_invalid_time_range(self):
time_ranges = [4000]
with self.assertRaises(ValueError):
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
def test_calculate_isochrone(self):
time_ranges = [300, 900]
solution = self.mapbox_isolines.calculate_isochrone(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
time_ranges=time_ranges)
assert solution
def test_calculate_isodistance(self):
distance_range = 10000
solution = self.mapbox_isolines.calculate_isodistance(
origin=VALID_ORIGIN,
profile=DEFAULT_PROFILE,
distance_range=distance_range)
assert solution
|
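The new test above pins the contract that an out-of-range time_ranges value raises ValueError. A self-contained sketch of the same assertRaises pattern with a stand-in validator, since the record does not state the real service limit — the 3600-second ceiling here is an assumption:

import unittest


class TimeRangeSketch(unittest.TestCase):
    @staticmethod
    def calculate_isochrone(time_ranges, limit=3600):
        # Stand-in for the Mapbox call; validates before any network I/O.
        if any(t <= 0 or t > limit for t in time_ranges):
            raise ValueError('time range out of bounds')
        return ['polygon'] * len(time_ranges)

    def test_rejects_out_of_range(self):
        with self.assertRaises(ValueError):
            self.calculate_isochrone([4000])

    def test_accepts_valid_ranges(self):
        self.assertEqual(len(self.calculate_isochrone([300, 900])), 2)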
5f9d8b30313200d9baa55ea468ad5b94481ba871
|
bianca/orm/repository.py
|
bianca/orm/repository.py
|
"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
|
"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
Make repo serializable via as_dict
|
Make repo serializable via as_dict
|
Python
|
mit
|
bumper-app/bumper-bianca,bumper-app/bumper-bianca
|
"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
Make repo serializable via as_dict
|
"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
<commit_before>"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
<commit_msg>Make repo serializable via as_dict<commit_after>
|
"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
Make repo serializable via as_dict"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
<commit_before>"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'url': self.url,
'creation_date': self.creation_date,
'ingestion_date': self.ingestion_date,
'last_ingested_commit': self.last_ingested_commit,
'analysis_date': self.analysis_date,
'status': self.status,
'email': self.email,
'listed': self.listed,
'last_data_dump': self.last_data_dump
}
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
# def __repr__(self):
# return "<Repository: %s - %s>" % (self.name, self.id)
<commit_msg>Make repo serializable via as_dict<commit_after>"""
file: repository.py
author: Ben Grawi <bjg1568@rit.edu>
date: October 2013
description: Holds the repository abstraction class and ORM
"""
import uuid
from db import *
from datetime import datetime
class Repository(Base):
"""
    Repository():
description: The SQLAlchemy ORM for the repository table
"""
__tablename__ = 'repositories'
id = Column(String, primary_key=True)
name = Column(String)
url = Column(String)
creation_date = Column(String)
ingestion_date = Column(String)
last_ingested_commit = Column(String)
analysis_date = Column(String)
status = Column(String)
email = Column(String)
listed = Column(Boolean)
last_data_dump = Column(String)
def __init__(self, *args, **kwargs):
self.id = str(uuid.uuid1())
self.creation_date = str(datetime.now().replace(microsecond=0))
self.url = kwargs.pop('url', None)
def as_dict(self):
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
def __repr__(self):
return "<Repository: %s - %s>" % (self.name, self.id)
|
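The as_dict above is the generic replacement for the hand-written serialize(): it reflects over __table__.columns, so newly added columns serialize automatically instead of having to be listed by hand. Sketched as a mixin, under the assumption of the usual SQLAlchemy declarative Base:

class SerializableMixin(object):
    """Drop-in for any mapped class, e.g. class Repository(SerializableMixin, Base)."""

    def as_dict(self):
        # Same comprehension as the record: one key per mapped column.
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}

    def __repr__(self):
        return '<%s: %s>' % (type(self).__name__, getattr(self, 'id', None))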
465977c2228620877b196e46ca883c743aeed856
|
cf_predict/test/conftest.py
|
cf_predict/test/conftest.py
|
"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
X = np.random.random_sample((20, 5))
y = np.random.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
return {"1.0.0": pickle.dumps(lm),
"1.1.0": pickle.dumps(dt),
"1.2.0": pickle.dumps(svr)}
|
"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from mockredis import MockRedis
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
rng = np.random.RandomState(42)
X = rng.random_sample((20, 5))
y = rng.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
r = MockRedis()
r.set("1.0.0", pickle.dumps(lm))
r.set("1.1.0", pickle.dumps(dt))
r.set("1.2.0", pickle.dumps(svr))
return r
|
Use MockRedis instead of dict to mock redis in unit tests
|
Use MockRedis instead of dict to mock redis in unit tests
|
Python
|
mit
|
ronert/cf-predict,ronert/cf-predict
|
"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
X = np.random.random_sample((20, 5))
y = np.random.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
return {"1.0.0": pickle.dumps(lm),
"1.1.0": pickle.dumps(dt),
"1.2.0": pickle.dumps(svr)}
Use MockRedis instead of dict to mock redis in unit tests
|
"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from mockredis import MockRedis
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
rng = np.random.RandomState(42)
X = rng.random_sample((20, 5))
y = rng.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
r = MockRedis()
r.set("1.0.0", pickle.dumps(lm))
r.set("1.1.0", pickle.dumps(dt))
r.set("1.2.0", pickle.dumps(svr))
return r
|
<commit_before>"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
X = np.random.random_sample((20, 5))
y = np.random.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
return {"1.0.0": pickle.dumps(lm),
"1.1.0": pickle.dumps(dt),
"1.2.0": pickle.dumps(svr)}
<commit_msg>Use MockRedis instead of dict to mock redis in unit tests<commit_after>
|
"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from mockredis import MockRedis
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
rng = np.random.RandomState(42)
X = rng.random_sample((20, 5))
y = rng.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
r = MockRedis()
r.set("1.0.0", pickle.dumps(lm))
r.set("1.1.0", pickle.dumps(dt))
r.set("1.2.0", pickle.dumps(svr))
return r
|
"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
X = np.random.random_sample((20, 5))
y = np.random.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
return {"1.0.0": pickle.dumps(lm),
"1.1.0": pickle.dumps(dt),
"1.2.0": pickle.dumps(svr)}
Use MockRedis instead of dict to mock redis in unit tests"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from mockredis import MockRedis
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
rng = np.random.RandomState(42)
X = rng.random_sample((20, 5))
y = rng.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
r = MockRedis()
r.set("1.0.0", pickle.dumps(lm))
r.set("1.1.0", pickle.dumps(dt))
r.set("1.2.0", pickle.dumps(svr))
return r
|
<commit_before>"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
X = np.random.random_sample((20, 5))
y = np.random.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
return {"1.0.0": pickle.dumps(lm),
"1.1.0": pickle.dumps(dt),
"1.2.0": pickle.dumps(svr)}
<commit_msg>Use MockRedis instead of dict to mock redis in unit tests<commit_after>"""Unit tests configuration file."""
import pickle
import numpy as np
import pytest
from sklearn import linear_model, tree, svm
from mockredis import MockRedis
from cf_predict import create_app
def pytest_configure(config):
"""Disable verbose output when running tests."""
terminal = config.pluginmanager.getplugin('terminal')
base = terminal.TerminalReporter
class QuietReporter(base):
"""A py.test reporting that only shows dots when running tests."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.verbosity = 0
self.showlongtestinfo = False
self.showfspath = False
terminal.TerminalReporter = QuietReporter
@pytest.fixture
def app(monkeypatch):
"""Create a Flask test client."""
monkeypatch.setattr("cf_predict.resources.get_db", models)
app = create_app("unit_testing")
return app
def models():
"""Create some sample machine learning models."""
rng = np.random.RandomState(42)
X = rng.random_sample((20, 5))
y = rng.random_sample(20)
lm = linear_model.LinearRegression()
dt = tree.DecisionTreeRegressor()
svr = svm.SVR()
lm.fit(X, y)
dt.fit(X, y)
svr.fit(X, y)
r = MockRedis()
r.set("1.0.0", pickle.dumps(lm))
r.set("1.1.0", pickle.dumps(dt))
r.set("1.2.0", pickle.dumps(svr))
return r
|
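Two independent improvements land in that fixture: seeding NumPy so the pickled models are identical across runs, and MockRedis (from the third-party mockredispy package the record imports) standing in for a live server. A minimal round-trip sketch — it assumes MockRedis hands back the stored bytes unchanged, which is exactly what the fixture relies on:

import pickle

import numpy as np
from mockredis import MockRedis

rng = np.random.RandomState(42)   # fixed seed -> reproducible test data
store = MockRedis()               # in-process stand-in, no Redis server needed
store.set('1.0.0', pickle.dumps(rng.random_sample(5)))
assert pickle.loads(store.get('1.0.0')).shape == (5,)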
1c7af58f9fabb5edfc559660d742825d3fbdefb0
|
chaoswg/forms.py
|
chaoswg/forms.py
|
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
time_factor = FloatField(u'Time Factor', validators=[NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')])
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
submit = SubmitField(u'Do Task now')
|
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points',
validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
time_factor = FloatField(u'Time Factor',
validators=[InputRequired(), NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')],
default=0.0)
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
submit = SubmitField(u'Do Task now')
|
Add some defaults for form input
|
Add some defaults for form input
|
Python
|
agpl-3.0
|
Obihoernchen/ChaosWG-Manager,Obihoernchen/ChaosWG-Manager,Obihoernchen/ChaosWG-Manager
|
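Worth spelling out for the change above: default=1 only pre-populates the rendered field, while InputRequired() still rejects a blank submission, so the two are complementary rather than redundant. A standalone sketch, independent of the record's fields:

from flask_wtf import FlaskForm
from wtforms.fields import IntegerField
from wtforms.validators import InputRequired, NumberRange


class PointsForm(FlaskForm):
    points = IntegerField(
        u'Points',
        validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
        default=1,  # shown when the empty form renders; not substituted for missing POST data
    )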
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
time_factor = FloatField(u'Time Factor', validators=[NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')])
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
submit = SubmitField(u'Do Task now')
Add some defaults for form input
|
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points',
validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
time_factor = FloatField(u'Time Factor',
validators=[InputRequired(), NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')],
default=0.0)
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
submit = SubmitField(u'Do Task now')
|
<commit_before>from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
time_factor = FloatField(u'Time Factor', validators=[NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')])
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
submit = SubmitField(u'Do Task now')
<commit_msg>Add some defaults for form input<commit_after>
|
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points',
validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
time_factor = FloatField(u'Time Factor',
validators=[InputRequired(), NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')],
default=0.0)
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
submit = SubmitField(u'Do Task now')
|
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
time_factor = FloatField(u'Time Factor', validators=[NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')])
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
submit = SubmitField(u'Do Task now')
Add some defaults for form input
from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points',
validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
time_factor = FloatField(u'Time Factor',
validators=[InputRequired(), NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')],
default=0.0)
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
submit = SubmitField(u'Do Task now')
|
<commit_before>from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
time_factor = FloatField(u'Time Factor', validators=[NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')])
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[NumberRange(1, 13, 'Value must be between 1 and 13')])
submit = SubmitField(u'Do Task now')
<commit_msg>Add some defaults for form input<commit_after>from flask_wtf import FlaskForm
from wtforms.fields import StringField, PasswordField, IntegerField, FloatField, SubmitField
from wtforms.validators import InputRequired, NumberRange, Optional
class LoginForm(FlaskForm):
name = StringField(u'Username', validators=[InputRequired()])
password = PasswordField(u'Password', validators=[InputRequired()])
submit = SubmitField(u'Login')
class CreateTaskForm(FlaskForm):
task = StringField(u'Task', validators=[InputRequired()])
base_points = IntegerField(u'Base Points',
validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
time_factor = FloatField(u'Time Factor',
validators=[InputRequired(), NumberRange(0.0, 3.0, 'Value must be between 0.0 and 3.0')],
default=0.0)
schedule_days = IntegerField(u'Schedule every X days (optional)',
validators=[Optional(), NumberRange(1, 365, 'Value must be between 1 and 365')])
submit = SubmitField(u'Create Task')
class CustomTaskForm(FlaskForm):
task = StringField(u'Custom Task', validators=[InputRequired()])
points = IntegerField(u'Points', validators=[InputRequired(), NumberRange(1, 13, 'Value must be between 1 and 13')],
default=1)
submit = SubmitField(u'Do Task now')
|
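The record above swaps bare validator lists for fields that also carry a `default`. A minimal, self-contained sketch of that pattern follows; it uses plain `wtforms.Form` instead of `FlaskForm` so it runs without a Flask app context, and the form and field names are illustrative, not taken from the repository:
from wtforms import Form, IntegerField
from wtforms.validators import InputRequired, NumberRange

class PointsForm(Form):
    # `default` pre-fills the field when the form is built without request
    # data; the validators still apply once real form data is bound.
    points = IntegerField('Points',
                          validators=[InputRequired(),
                                      NumberRange(1, 13, 'Value must be between 1 and 13')],
                          default=1)

form = PointsForm()      # no submitted data, so the default is used
print(form.points.data)  # -> 1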
33e1e41e867e996baccddb9a892ec05bbd4f93f9
|
polls/views.py
|
polls/views.py
|
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
|
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db.models import F
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = Choice.objects.get(poll_id=p.pk, pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
Choice.objects.filter(pk=selected_choice.pk, poll_id=p.pk).update(votes=F('votes') + 1)
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
|
Fix vote view to work with sharded Choice
|
Fix vote view to work with sharded Choice
|
Python
|
apache-2.0
|
disqus/sharding-example,komuW/sharding-example,komuW/sharding-example
|
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
Fix vote view to work with sharded Choice
|
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db.models import F
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = Choice.objects.get(poll_id=p.pk, pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
Choice.objects.filter(pk=selected_choice.pk, poll_id=p.pk).update(votes=F('votes') + 1)
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
|
<commit_before>from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
<commit_msg>Fix vote view to work with sharded Choice<commit_after>
|
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db.models import F
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = Choice.objects.get(poll_id=p.pk, pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
Choice.objects.filter(pk=selected_choice.pk, poll_id=p.pk).update(votes=F('votes') + 1)
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
|
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
Fix vote view to work with sharded Choice
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db.models import F
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = Choice.objects.get(poll_id=p.pk, pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
Choice.objects.filter(pk=selected_choice.pk, poll_id=p.pk).update(votes=F('votes') + 1)
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
|
<commit_before>from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
<commit_msg>Fix vote view to work with sharded Choice<commit_after>from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.db.models import F
from polls.models import Choice, Poll
def vote(request, poll_id):
p = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = Choice.objects.get(poll_id=p.pk, pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the poll voting form.
return render_to_response('polls/detail.html', {
'poll': p,
'error_message': "You didn't select a choice.",
}, context_instance=RequestContext(request))
else:
Choice.objects.filter(pk=selected_choice.pk, poll_id=p.pk).update(votes=F('votes') + 1)
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
|
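The rewritten view above queries Choice with both the row's primary key and its poll_id. A hedged sketch of why, assuming the `polls` app from this record and a database router that shards Choice on poll_id: keeping the shard key in the filter lets the router pick the right database, and F('votes') + 1 pushes the increment into a single UPDATE instead of a racy read-modify-write.
from django.db.models import F
from polls.models import Choice

def record_vote(poll_id, choice_id):
    # poll_id in the filter carries the shard key to the database router;
    # F() makes the increment atomic on the database side.
    Choice.objects.filter(pk=choice_id, poll_id=poll_id).update(votes=F('votes') + 1)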
2b3df42f77c7277369631c1b31266a41526bf90c
|
src/rotest/management/migrations/0002_auto_20150224_1427.py
|
src/rotest/management/migrations/0002_auto_20150224_1427.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
ADMIN_USERNAME = "rotest"
ADMIN_PASSWORD = "rotest"
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
try:
auth_models.User.objects.get(username=ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
auth_models.User.objects.create_superuser(ADMIN_USERNAME,
"rotest@rotest.com",
ADMIN_PASSWORD)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
Revert the superuser creation in a migration
|
Revert the superuser creation in a migration
|
Python
|
mit
|
gregoil/rotest
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
Revert the superuser creation in a migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
ADMIN_USERNAME = "rotest"
ADMIN_PASSWORD = "rotest"
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
try:
auth_models.User.objects.get(username=ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
auth_models.User.objects.create_superuser(ADMIN_USERNAME,
"rotest@rotest.com",
ADMIN_PASSWORD)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
<commit_msg>Revert the superuser creation in a migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
ADMIN_USERNAME = "rotest"
ADMIN_PASSWORD = "rotest"
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
try:
auth_models.User.objects.get(username=ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
auth_models.User.objects.create_superuser(ADMIN_USERNAME,
"rotest@rotest.com",
ADMIN_PASSWORD)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
Revert the superuser creation in a migration
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
ADMIN_USERNAME = "rotest"
ADMIN_PASSWORD = "rotest"
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
try:
auth_models.User.objects.get(username=ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
auth_models.User.objects.create_superuser(ADMIN_USERNAME,
"rotest@rotest.com",
ADMIN_PASSWORD)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
<commit_msg>Revert the superuser creation in a migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth import models as auth_models
ADMIN_USERNAME = "rotest"
ADMIN_PASSWORD = "rotest"
def create_users(apps, schema_editor):
qa_group, _ = auth_models.Group.objects.get_or_create(name="QA")
localhost, _ = auth_models.User.objects.get_or_create(username="localhost",
password="localhost",
email="l@l.com")
qa_group.user_set.add(localhost)
try:
auth_models.User.objects.get(username=ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
auth_models.User.objects.create_superuser(ADMIN_USERNAME,
"rotest@rotest.com",
ADMIN_PASSWORD)
class Migration(migrations.Migration):
dependencies = [
('management', '0001_initial'),
]
operations = [migrations.RunPython(create_users)
]
|
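The migration above guards superuser creation with a DoesNotExist check so rerunning it stays harmless. A minimal sketch of the same idempotent pattern follows; the credentials are illustrative and not from the record, and in new code `apps.get_model('auth', 'User')` would normally be preferred over importing the model directly:
from django.contrib.auth import models as auth_models

ADMIN_USERNAME = 'admin'       # illustrative, not the record's values
ADMIN_PASSWORD = 'change-me'

def ensure_admin(apps, schema_editor):
    # Create the account only if it is missing, so the data migration can
    # be applied (or faked and re-applied) without raising IntegrityError.
    if not auth_models.User.objects.filter(username=ADMIN_USERNAME).exists():
        auth_models.User.objects.create_superuser(
            ADMIN_USERNAME, 'admin@example.com', ADMIN_PASSWORD)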
c7ec4e6be21718ed7b9b94aed2815150d8e4b95f
|
cheroot/test/test_compat.py
|
cheroot/test/test_compat.py
|
"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
with self.assertRaises(TypeError):
compat.ntob('fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
|
"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
self.assertRaises(TypeError, compat.ntob, 'fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
|
Revert ntob check to imperative style
|
Revert ntob check to imperative style
As context manager isn't available under Python 2.6
|
Python
|
bsd-3-clause
|
cherrypy/cheroot
|
"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
with self.assertRaises(TypeError):
compat.ntob('fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
Revert ntob check to imperative style
As context manager isn't available under Python 2.6
|
"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
self.assertRaises(TypeError, compat.ntob, 'fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
|
<commit_before>"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
with self.assertRaises(TypeError):
compat.ntob('fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
<commit_msg>Revert ntob check to imperative style
As context manager isn't available under Python 2.6<commit_after>
|
"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
self.assertRaises(TypeError, compat.ntob, 'fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
|
"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
with self.assertRaises(TypeError):
compat.ntob('fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
Revert ntob check to imperative style
As context manager isn't available under Python 2.6"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
self.assertRaises(TypeError, compat.ntob, 'fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
|
<commit_before>"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
with self.assertRaises(TypeError):
compat.ntob('fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
<commit_msg>Revert ntob check to imperative style
As context manager isn't available under Python 2.6<commit_after>"""Test Python 2/3 compatibility module."""
from __future__ import unicode_literals
import unittest
import pytest
import six
from cheroot import _compat as compat
class StringTester(unittest.TestCase):
"""Tests for string conversion."""
@pytest.mark.skipif(six.PY3, reason='Only useful on Python 2')
def test_ntob_non_native(self):
"""ntob should raise an Exception on unicode.
(Python 2 only)
See #1132 for discussion.
"""
self.assertRaises(TypeError, compat.ntob, 'fight')
class EscapeTester(unittest.TestCase):
"""Class to test escape_html function from _cpcompat."""
def test_escape_quote(self):
"""Verify the output for &<>"' chars."""
self.assertEqual("""xx&<>"aa'""", compat.escape_html("""xx&<>"aa'"""))
|
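The change above matters because assertRaises as a context manager only arrived with Python 2.7's unittest; the call form works everywhere. A self-contained comparison of the two spellings, with a stand-in `ntob` that only mimics the behaviour of the real `cheroot._compat.ntob`:
import unittest

def ntob(s):
    # stand-in: reject text, accept bytes, like the Python 2 ntob
    if not isinstance(s, bytes):
        raise TypeError('ntob expects bytes')
    return s

class NtobTest(unittest.TestCase):
    def test_call_style(self):
        # works on Python 2.6 and later
        self.assertRaises(TypeError, ntob, u'fight')

    def test_context_manager_style(self):
        # needs Python 2.7+ (or unittest2)
        with self.assertRaises(TypeError):
            ntob(u'fight')

if __name__ == '__main__':
    unittest.main()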
fab9c33ed2e4e8c7c43ecf548dbc49c7b8cfd752
|
observatory/manage.py
|
observatory/manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import os.path
import sys
if __name__ == "__main__":
#Include parent directory in the path by default
path = os.path.abspath('../')
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "observatory.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Add parent directory in path by default
|
Add parent directory in path by default
|
Python
|
isc
|
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Add parent directory in path by default
|
#!/usr/bin/env python
import os
import os.path
import sys
if __name__ == "__main__":
#Include parent directory in the path by default
path = os.path.abspath('../')
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "observatory.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Add parent directory in path by default<commit_after>
|
#!/usr/bin/env python
import os
import os.path
import sys
if __name__ == "__main__":
#Include parent directory in the path by default
path = os.path.abspath('../')
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "observatory.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Add parent directory in path by default
#!/usr/bin/env python
import os
import os.path
import sys
if __name__ == "__main__":
#Include parent directory in the path by default
path = os.path.abspath('../')
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "observatory.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Add parent directory in path by default<commit_after>#!/usr/bin/env python
import os
import os.path
import sys
if __name__ == "__main__":
#Include parent directory in the path by default
path = os.path.abspath('../')
if path not in sys.path:
sys.path.append(path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "observatory.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
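One caveat with the change above, offered as a hedged suggestion rather than a fix to the record: os.path.abspath('../') resolves against the current working directory, so the import only works when manage.py is run from its own directory. A sketch anchored on __file__ avoids that:
import os.path
import sys

# Resolve the parent of the directory containing this script, regardless
# of where the interpreter was started from.
parent = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
if parent not in sys.path:
    sys.path.insert(0, parent)  # makes `observatory.settings` importable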
67fcadfa8fd3e6c4161ca4756cc65f0db1386c06
|
usercustomize.py
|
usercustomize.py
|
""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
cgitb.enable(format='text')
|
""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
import sys
import os
import os.path
cgitb.enable(format='text')
sys.path.insert(0, os.path.join(os.environ['HOME'],
'gtk/inst/lib/python2.7/site-packages'))
|
Add OS X GTK to Python path.
|
Add OS X GTK to Python path.
|
Python
|
mit
|
fossilet/dotfiles,fossilet/dotfiles,fossilet/dotfiles
|
""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
cgitb.enable(format='text')
Add OS X GTK to Python path.
|
""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
import sys
import os
import os.path
cgitb.enable(format='text')
sys.path.insert(0, os.path.join(os.environ['HOME'],
'gtk/inst/lib/python2.7/site-packages'))
|
<commit_before>""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
cgitb.enable(format='text')
<commit_msg>Add OS X GTK to Python path.<commit_after>
|
""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
import sys
import os
import os.path
cgitb.enable(format='text')
sys.path.insert(0, os.path.join(os.environ['HOME'],
'gtk/inst/lib/python2.7/site-packages'))
|
""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
cgitb.enable(format='text')
Add OS X GTK to Python path.""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
import sys
import os
import os.path
cgitb.enable(format='text')
sys.path.insert(0, os.path.join(os.environ['HOME'],
'gtk/inst/lib/python2.7/site-packages'))
|
<commit_before>""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
cgitb.enable(format='text')
<commit_msg>Add OS X GTK to Python path.<commit_after>""" Customize Python Interpreter.
Link your user customizing file to this file.
For more info see: https://docs.python.org/3/library/site.html
"Default value is ~/.local/lib/pythonX.Y/site-packages for UNIX and
non-framework Mac OS X builds, ~/Library/Python/X.Y/lib/python/site-packages
for Mac framework builds, and %APPDATA%\Python\PythonXY\site-packages on
Windows."
Sun May 4 18:06:08 CST 2014
"""
import cgitb
import sys
import os
import os.path
cgitb.enable(format='text')
sys.path.insert(0, os.path.join(os.environ['HOME'],
'gtk/inst/lib/python2.7/site-packages'))
|
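For context on the record above: Python only picks up a usercustomize.py from the user site-packages directory described in its docstring, and skips it entirely under -s or PYTHONNOUSERSITE. A two-line check prints where that directory is on the current machine:
import site

# The directory scanned for usercustomize.py at interpreter startup.
print(site.getusersitepackages())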
4567bf8044ece7b4ef5ccc9cf81a7dac7bcab017
|
takeyourmeds/groups/models.py
|
takeyourmeds/groups/models.py
|
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.token,
self.user_id,
)
|
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.access_token,
self.user_id,
)
|
Correct unicode for access tokens
|
Correct unicode for access tokens
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
Python
|
mit
|
takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web
|
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.token,
self.user_id,
)
Correct unicode for access tokens
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.access_token,
self.user_id,
)
|
<commit_before>import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.token,
self.user_id,
)
<commit_msg>Correct unicode for access tokens
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>
|
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.access_token,
self.user_id,
)
|
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.token,
self.user_id,
)
Correct unicode for access tokens
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.access_token,
self.user_id,
)
|
<commit_before>import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.token,
self.user_id,
)
<commit_msg>Correct unicode for access tokens
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>import datetime
import functools
from django.db import models
from django.utils.crypto import get_random_string
from .managers import GroupManager
class Group(models.Model):
"""
Instances must be created using ``Group.objects.create_group`` to ensure
Stripe is configured correctly.
"""
name = models.CharField(max_length=255, unique=True)
created = models.DateTimeField(default=datetime.datetime.utcnow)
objects = GroupManager()
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"name=%r" % (
self.name,
)
class AccessToken(models.Model):
group = models.ForeignKey(Group, related_name='access_tokens')
access_token = models.CharField(
unique=True,
max_length=8,
default=functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX'),
)
user = models.OneToOneField(
'account.User',
null=True,
related_name='token',
)
created = models.DateTimeField(default=datetime.datetime.utcnow)
class Meta:
ordering = ('-created',)
get_latest_by = 'created'
def __unicode__(self):
return u"group_id=%r token=%r user_id=%s" % (
self.group_id,
self.access_token,
self.user_id,
)
|
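The access_token default above is worth a note: functools.partial freezes get_random_string with its arguments into a zero-argument callable, and Django invokes a callable default once per new row, so each token is fresh rather than fixed once at import time. A minimal sketch of just that mechanism (assuming Django is importable; no project settings should be needed):
import functools
from django.utils.crypto import get_random_string

# Same pattern as the model field's default= argument above.
make_token = functools.partial(get_random_string, 8, 'ACEFHKJMLNPRUTWVYX')

print(make_token())  # new 8-character token per call, e.g. 'KXWTAUCP'
print(make_token())  # a different value each time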
61ea8ac67279cb46800237546384cf2e85180d1b
|
test/order/TestOrderFile.py
|
test/order/TestOrderFile.py
|
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
def test_order_file(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDefault()
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
def test_with_dsym(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDsym()
self.order_file()
def test_with_dwarf(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDwarf()
self.order_file()
def order_file(self):
"""Test debug symbols follow the correct order by the order file."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
Test order file with both dsym and dwarf combination.
|
Test order file with both dsym and dwarf combination.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@113884 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb
|
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
def test_order_file(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDefault()
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
Test order file with both dsym and dwarf combination.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@113884 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
def test_with_dsym(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDsym()
self.order_file()
def test_with_dwarf(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDwarf()
self.order_file()
def order_file(self):
"""Test debug symbols follow the correct order by the order file."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
<commit_before>"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
def test_order_file(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDefault()
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
<commit_msg>Test order file with both dsym and dwarf combination.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@113884 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, sys, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
def test_with_dsym(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDsym()
self.order_file()
def test_with_dwarf(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDwarf()
self.order_file()
def order_file(self):
"""Test debug symbols follow the correct order by the order file."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
def test_order_file(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDefault()
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
Test order file with both dsym and dwarf combination.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@113884 91177308-0d34-0410-b5e6-96231b3b80d8"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, sys, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
def test_with_dsym(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDsym()
self.order_file()
def test_with_dwarf(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDwarf()
self.order_file()
def order_file(self):
"""Test debug symbols follow the correct order by the order file."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
<commit_before>"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
def test_order_file(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDefault()
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
<commit_msg>Test order file with both dsym and dwarf combination.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@113884 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""
Test that debug symbols have the correct order as specified by the order file.
"""
import os, sys, time
import re
import unittest2
import lldb
from lldbtest import *
class OrderFileTestCase(TestBase):
mydir = "order"
@unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
def test_with_dsym(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDsym()
self.order_file()
def test_with_dwarf(self):
"""Test debug symbols follow the correct order by the order file."""
self.buildDwarf()
self.order_file()
def order_file(self):
"""Test debug symbols follow the correct order by the order file."""
exe = os.path.join(os.getcwd(), "a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Test that the debug symbols have Function f3 before Function f1.
self.runCmd("image dump symtab a.out")
output = self.res.GetOutput()
mo_f3 = re.search("Code +.+f3", output)
mo_f1 = re.search("Code +.+f1", output)
# Match objects for f3 and f1 must exist and f3 must come before f1.
self.assertTrue(mo_f3 and mo_f1 and mo_f3.start() < mo_f1.start(),
"Symbols have correct order by the order file")
self.runCmd("run", RUN_COMPLETED)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
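The refactor in this record is a pattern worth naming: one test entry point per debug-info build flavor, each differing only in the build call and delegating to a single shared check, with a platform guard on the flavor that needs it. Below is a minimal sketch of the same shape in plain stdlib unittest; the build_* methods are hypothetical stand-ins, not the lldb test harness.

import sys
import unittest

class SharedCheckPattern(unittest.TestCase):
    """Two thin entry points, one shared assertion body."""

    def build_dsym(self):
        # Hypothetical stand-in for a dSYM build step.
        self.flavor = "dsym"

    def build_dwarf(self):
        # Hypothetical stand-in for a DWARF build step.
        self.flavor = "dwarf"

    @unittest.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
    def test_with_dsym(self):
        self.build_dsym()
        self.shared_check()

    def test_with_dwarf(self):
        self.build_dwarf()
        self.shared_check()

    def shared_check(self):
        # The assertions live in one place, so both debug-info flavors
        # are exercised against identical expectations.
        self.assertIn(self.flavor, ("dsym", "dwarf"))

if __name__ == "__main__":
    unittest.main()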
e22794f07c6d1027e16617ac6874289794080967
|
account_payment_include_draft_move/wizard/payment_order_create.py
|
account_payment_include_draft_move/wizard/payment_order_create.py
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
domain.remove(POSTED_MOVE_DOMAIN)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
pos = domain.index(POSTED_MOVE_DOMAIN)
domain[pos] = (1, '=', 1)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
|
Replace partial domain by (1,'=',1)
|
[IMP] Replace partial domain by (1,'=',1)
|
Python
|
agpl-3.0
|
sergio-incaser/bank-payment,sergio-incaser/bank-payment,sergiocorato/bank-payment,damdam-s/bank-payment,rlizana/bank-payment,CompassionCH/bank-payment,incaser/bank-payment,sergiocorato/bank-payment,Antiun/bank-payment,sergio-teruel/bank-payment,rlizana/bank-payment,ndtran/bank-payment,David-Amaro/bank-payment,sergio-teruel/bank-payment,Antiun/bank-payment,hbrunn/bank-payment,syci/bank-payment,acsone/bank-payment,CompassionCH/bank-payment,syci/bank-payment,David-Amaro/bank-payment,diagramsoftware/bank-payment,open-synergy/bank-payment,damdam-s/bank-payment,ndtran/bank-payment
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
domain.remove(POSTED_MOVE_DOMAIN)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
[IMP] Replace partial domain by (1,'=',1)
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
pos = domain.index(POSTED_MOVE_DOMAIN)
domain[pos] = (1, '=', 1)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
|
<commit_before># -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
domain.remove(POSTED_MOVE_DOMAIN)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
<commit_msg>[IMP] Replace partial domain by (1,'=',1)<commit_after>
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
pos = domain.index(POSTED_MOVE_DOMAIN)
domain[pos] = (1, '=', 1)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
|
# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
domain.remove(POSTED_MOVE_DOMAIN)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
[IMP] Replace partial domain by (1,'=',1)# -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
pos = domain.index(POSTED_MOVE_DOMAIN)
domain[pos] = (1, '=', 1)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
|
<commit_before># -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
domain.remove(POSTED_MOVE_DOMAIN)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
<commit_msg>[IMP] Replace partial domain by (1,'=',1)<commit_after># -*- coding: utf-8 -*-
#
##############################################################################
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
POSTED_MOVE_DOMAIN = ('move_id.state', '=', 'posted')
class PaymentOrderCreate(models.TransientModel):
_inherit = 'payment.order.create'
@api.model
def extend_payment_order_domain(self, payment_order, domain):
if POSTED_MOVE_DOMAIN in domain:
pos = domain.index(POSTED_MOVE_DOMAIN)
domain[pos] = (1, '=', 1)
return super(PaymentOrderCreate, self)\
.extend_payment_order_domain(payment_order, domain)
|
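The substitution above is not cosmetic. These domains are lists in prefix notation, where an explicit operator such as '|' consumes the next two terms; removing a matched leaf can leave an operator one operand short, while swapping in the always-true leaf (1, '=', 1) keeps the expression well-formed. A small self-contained illustration follows; the date_maturity leaf is made up for the example.

POSTED = ('move_id.state', '=', 'posted')

# A domain in prefix notation: '|' expects exactly two operands.
domain = ['|', POSTED, ('date_maturity', '<=', '2014-12-31')]

# Removing the leaf leaves '|' with a single operand -- malformed.
broken = [term for term in domain if term != POSTED]

# Replacing it in place preserves the operator's arity.
fixed = list(domain)
fixed[fixed.index(POSTED)] = (1, '=', 1)

print(broken)  # ['|', ('date_maturity', '<=', '2014-12-31')]
print(fixed)   # ['|', (1, '=', 1), ('date_maturity', '<=', '2014-12-31')]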
1fd2299b2a0c993bd463ab88c0a7544ade2c945b
|
test_kasp/disk/test_disk.py
|
test_kasp/disk/test_disk.py
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
def __init__(self):
self.WRITE_MB = 128
self.WRITE_BLOCK_KB = 1024
self.READ_BLOCK_B = 512
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
|
Remove init method from disk test
|
Remove init method from disk test
Removed init method from test class for disk test
|
Python
|
apache-2.0
|
vrovachev/kaspersky-framework
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
def __init__(self):
self.WRITE_MB = 128
self.WRITE_BLOCK_KB = 1024
self.READ_BLOCK_B = 512
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
Remove init method from disk test
Removed init method from test class for disk test
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
|
<commit_before># Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
def __init__(self):
self.WRITE_MB = 128
self.WRITE_BLOCK_KB = 1024
self.READ_BLOCK_B = 512
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
<commit_msg>Remove init method from disk test
Removed init method from test class for disk test<commit_after>
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
def __init__(self):
self.WRITE_MB = 128
self.WRITE_BLOCK_KB = 1024
self.READ_BLOCK_B = 512
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
Remove init method from disk test
Removed init method from test class for disk test# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
|
<commit_before># Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
def __init__(self):
self.WRITE_MB = 128
self.WRITE_BLOCK_KB = 1024
self.READ_BLOCK_B = 512
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
<commit_msg>Remove init method from disk test
Removed init method from test class for disk test<commit_after># Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import pytest
from utils.disk_utils import DiskIO
class TestDisk:
@staticmethod
def all_free_disk_space_gb():
return reduce(lambda res, x: res+x[1], DiskIO().disks, 0)
@pytest.mark.disk
@pytest.mark.storage
def test_disk_space_storage(self):
assert self.all_free_disk_space_gb() > 3000
|
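The commit message does not say why the constructor had to go, but the likely reason is mechanical: pytest refuses to collect a test class that defines __init__, warning and skipping it instead, so the whole class was silently not running. Constants that lived in the constructor belong on the class itself. A minimal sketch of the fixed shape, runnable under pytest:

class TestDiskConstants:
    # Class attributes replace the removed __init__; pytest only
    # collects test classes that have no custom constructor.
    WRITE_MB = 128
    WRITE_BLOCK_KB = 1024
    READ_BLOCK_B = 512

    def test_block_sizes_are_positive(self):
        assert self.WRITE_MB > 0
        assert self.WRITE_BLOCK_KB > 0
        assert self.READ_BLOCK_B > 0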
4ce6792829174e7df4614e5caeddf5b280d59822
|
comics/comics/darklegacy.py
|
comics/comics/darklegacy.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
history_capable_date = "2006-12-09"
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
    history_capable_days = 29 * 7 # 29 weekly releases
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
|
Change history capability for "Dark Legacy"
|
Change history capability for "Dark Legacy"
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
history_capable_date = "2006-12-09"
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
Change history capability for "Dark Legacy"
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
    history_capable_days = 29 * 7 # 29 weekly releases
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
|
<commit_before>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
history_capable_date = "2006-12-09"
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
<commit_msg>Change history capability for "Dark Legacy"<commit_after>
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
    history_capable_days = 29 * 7 # 29 weekly releases
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
history_capable_date = "2006-12-09"
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
Change history capability for "Dark Legacy"from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
    history_capable_days = 29 * 7 # 29 weekly releases
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
|
<commit_before>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
history_capable_date = "2006-12-09"
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
<commit_msg>Change history capability for "Dark Legacy"<commit_after>from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Dark Legacy"
language = "en"
url = "http://www.darklegacycomics.com/"
start_date = "2006-01-01"
rights = "Arad Kedar"
class Crawler(CrawlerBase):
    history_capable_days = 29 * 7 # 29 weekly releases
schedule = "Su"
time_zone = "US/Pacific"
def crawl(self, pub_date):
feed = self.parse_feed("http://www.darklegacycomics.com/feed.xml")
for entry in feed.for_date(pub_date):
title = entry.title
page = self.parse_page(entry.link)
url = page.src("img.comic-image")
return CrawlerImage(url, title)
|
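The attribute swap changes what "history capable" means: a fixed date pins the earliest crawlable strip forever, while a day count defines a rolling window that slides forward with the clock. Assuming the crawler base class interprets the value as days back from today, the earliest reachable date works out as in this sketch:

import datetime

def earliest_crawlable(history_capable_days, today=None):
    # Assumed semantics: a window of N days back from the current date.
    today = today or datetime.date.today()
    return today - datetime.timedelta(days=history_capable_days)

# 29 weekly releases back from a sample "today":
print(earliest_crawlable(29 * 7, today=datetime.date(2013, 1, 6)))
# -> 2012-06-17, and the window keeps sliding as time passes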
d927ada17522edfb91489e8558bbc88ff741a3c5
|
bokeh/models/widgets/markups.py
|
bokeh/models/widgets/markups.py
|
""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
Introduce Markup abstract base class
|
Introduce Markup abstract base class
|
Python
|
bsd-3-clause
|
aiguofer/bokeh,jakirkham/bokeh,ChinaQuants/bokeh,percyfal/bokeh,bokeh/bokeh,philippjfr/bokeh,philippjfr/bokeh,stonebig/bokeh,percyfal/bokeh,muku42/bokeh,DuCorey/bokeh,muku42/bokeh,ericmjl/bokeh,azjps/bokeh,muku42/bokeh,percyfal/bokeh,philippjfr/bokeh,msarahan/bokeh,deeplook/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,philippjfr/bokeh,justacec/bokeh,schoolie/bokeh,jplourenco/bokeh,evidation-health/bokeh,srinathv/bokeh,tacaswell/bokeh,ptitjano/bokeh,dennisobrien/bokeh,dennisobrien/bokeh,quasiben/bokeh,ChinaQuants/bokeh,clairetang6/bokeh,khkaminska/bokeh,evidation-health/bokeh,quasiben/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,maxalbert/bokeh,rs2/bokeh,schoolie/bokeh,timsnyder/bokeh,msarahan/bokeh,deeplook/bokeh,jplourenco/bokeh,azjps/bokeh,stonebig/bokeh,khkaminska/bokeh,ericmjl/bokeh,aiguofer/bokeh,msarahan/bokeh,mindriot101/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,htygithub/bokeh,maxalbert/bokeh,percyfal/bokeh,timsnyder/bokeh,draperjames/bokeh,aavanian/bokeh,evidation-health/bokeh,mindriot101/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,clairetang6/bokeh,phobson/bokeh,ericmjl/bokeh,evidation-health/bokeh,rs2/bokeh,draperjames/bokeh,jplourenco/bokeh,ptitjano/bokeh,schoolie/bokeh,ptitjano/bokeh,maxalbert/bokeh,justacec/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,srinathv/bokeh,bokeh/bokeh,phobson/bokeh,azjps/bokeh,bokeh/bokeh,percyfal/bokeh,deeplook/bokeh,phobson/bokeh,mindriot101/bokeh,maxalbert/bokeh,tacaswell/bokeh,muku42/bokeh,jakirkham/bokeh,mindriot101/bokeh,justacec/bokeh,ericmjl/bokeh,deeplook/bokeh,jakirkham/bokeh,htygithub/bokeh,tacaswell/bokeh,gpfreitas/bokeh,srinathv/bokeh,draperjames/bokeh,aavanian/bokeh,khkaminska/bokeh,bokeh/bokeh,KasperPRasmussen/bokeh,azjps/bokeh,azjps/bokeh,timsnyder/bokeh,schoolie/bokeh,justacec/bokeh,philippjfr/bokeh,KasperPRasmussen/bokeh,ChinaQuants/bokeh,DuCorey/bokeh,stonebig/bokeh,tacaswell/bokeh,schoolie/bokeh,rs2/bokeh,phobson/bokeh,phobson/bokeh,aiguofer/bokeh,khkaminska/bokeh,clairetang6/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,dennisobrien/bokeh,srinathv/bokeh,dennisobrien/bokeh,draperjames/bokeh,aavanian/bokeh,gpfreitas/bokeh,KasperPRasmussen/bokeh,clairetang6/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,jakirkham/bokeh,DuCorey/bokeh,DuCorey/bokeh,aavanian/bokeh,aiguofer/bokeh,ericmjl/bokeh,msarahan/bokeh,rs2/bokeh,timsnyder/bokeh,quasiben/bokeh,jakirkham/bokeh,gpfreitas/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,stonebig/bokeh,bokeh/bokeh,timsnyder/bokeh,jplourenco/bokeh
|
""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
Introduce Markup abstract base class
|
""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
<commit_before>""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
<commit_msg>Introduce Markup abstract base class<commit_after>
|
""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
Introduce Markup abstract base class""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
<commit_before>""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
class Paragraph(Widget):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
<commit_msg>Introduce Markup abstract base class<commit_after>""" Various kinds of markup (static content) widgets.
"""
from __future__ import absolute_import
from ...properties import abstract
from ...properties import Int, String
from ..widget import Widget
@abstract
class Markup(Widget):
""" Base class for HTML markup widget models. """
class Paragraph(Markup):
""" A block (paragraph) of text.
"""
text = String(help="""
The contents of the widget.
""")
width = Int(500, help="""
The width of the block in pixels.
""")
height = Int(400, help="""
The height of the block in pixels.
""")
class PreText(Paragraph):
""" A block (paragraph) of pre-formatted text.
"""
|
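The decorator marks Markup as a grouping point rather than a widget to instantiate directly, so later markup models can hang off one shared base. How bokeh implements @abstract internally is out of scope here; the plain-Python equivalent of the idea uses abc, as in this generic sketch (not bokeh's mechanism):

from abc import ABC, abstractmethod

class Markup(ABC):
    # One home for behavior common to all static-content widgets.
    @abstractmethod
    def render(self):
        raise NotImplementedError

class Paragraph(Markup):
    def __init__(self, text):
        self.text = text

    def render(self):
        return "<p>%s</p>" % self.text

print(Paragraph("hello").render())  # <p>hello</p>
# Markup() itself raises TypeError: abstract classes cannot be instantiated.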
2cb73ac018287ab77b380c31166ec4fc6fd99f5e
|
performanceplatform/collector/gcloud/__init__.py
|
performanceplatform/collector/gcloud/__init__.py
|
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
push_aggregates(data_set)
|
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
data_set.empty_data_set()
push_aggregates(data_set)
|
Make the G-Cloud collector empty the data set
|
Make the G-Cloud collector empty the data set
https://www.pivotaltracker.com/story/show/72073020
[Delivers #72073020]
|
Python
|
mit
|
alphagov/performanceplatform-collector,alphagov/performanceplatform-collector,alphagov/performanceplatform-collector
|
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
push_aggregates(data_set)
Make the G-Cloud collector empty the data set
https://www.pivotaltracker.com/story/show/72073020
[Delivers #72073020]
|
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
data_set.empty_data_set()
push_aggregates(data_set)
|
<commit_before>from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
push_aggregates(data_set)
<commit_msg>Make the G-Cloud collector empty the data set
https://www.pivotaltracker.com/story/show/72073020
[Delivers #72073020]<commit_after>
|
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
data_set.empty_data_set()
push_aggregates(data_set)
|
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
push_aggregates(data_set)
Make the G-Cloud collector empty the data set
https://www.pivotaltracker.com/story/show/72073020
[Delivers #72073020]from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
data_set.empty_data_set()
push_aggregates(data_set)
|
<commit_before>from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
push_aggregates(data_set)
<commit_msg>Make the G-Cloud collector empty the data set
https://www.pivotaltracker.com/story/show/72073020
[Delivers #72073020]<commit_after>from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from dshelpers import download_url
from performanceplatform.collector.gcloud.core import (
nuke_local_database, save_raw_data, aggregate_and_save,
push_aggregates)
from performanceplatform.collector.gcloud.sales_parser import (
get_latest_csv_url)
from performanceplatform.collector.write import DataSet
INDEX_URL = ('https://digitalmarketplace.blog.gov.uk'
'/sales-accreditation-information/')
def main(credentials, data_set_config, query, options, start_at, end_at,
filename=None):
nuke_local_database()
if filename is not None:
with open(filename, 'r') as f:
save_raw_data(f)
else:
save_raw_data(download_url(get_latest_csv_url(INDEX_URL)))
aggregate_and_save()
data_set = DataSet.from_config(data_set_config)
data_set.empty_data_set()
push_aggregates(data_set)
|
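A side note on the pattern this record captures: the change makes the collector call empty_data_set() before pushing, so each run replaces earlier aggregates instead of stacking new ones on top. A minimal runnable sketch of that replace-then-push idea follows; FakeDataSet and its methods are hypothetical stand-ins, not the real performanceplatform client API.
class FakeDataSet(object):
    # Hypothetical stand-in for performanceplatform.collector.write.DataSet.
    def __init__(self):
        self.records = []
    def empty_data_set(self):
        # Drop previously pushed records so a rerun starts clean.
        self.records = []
    def post(self, records):
        self.records.extend(records)
def push_aggregates(data_set, aggregates):
    # Empty first, then push: reruns replace data instead of appending.
    data_set.empty_data_set()
    data_set.post(aggregates)
ds = FakeDataSet()
push_aggregates(ds, [{'sales': 10}])
push_aggregates(ds, [{'sales': 12}])
assert ds.records == [{'sales': 12}]  # only the latest run is kept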
bd0ccca9e629b6a9c48147984b8d68cf80fe470c
|
test/single_system/bmc_test.py
|
test/single_system/bmc_test.py
|
import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
|
import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
def test_bmc_info_eleven_times(self):
"""BMC info provides expected results 11 times in a row"""
for i in range(0, 11):
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
|
Add a test for bmc info working eleven times in a row
|
Add a test for bmc info working eleven times in a row
This is specifically to check for SW-732, where results stop after 10
bmc info requests.
|
Python
|
bsd-3-clause
|
Cynerva/pyipmi,emaadmanzoor/pyipmi
|
import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
Add a test for bmc info working eleven times in a row
This is specifically to check for SW-732, where results stop after 10
bmc info requests.
|
import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
def test_bmc_info_eleven_times(self):
"""BMC info provides expected results 11 times in a row"""
for i in range(0, 11):
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
|
<commit_before>import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
<commit_msg>Add a test for bmc info working eleven times in a row
This is specifically to check for SW-732, where results stop after 10
bmc info requests.<commit_after>
|
import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
def test_bmc_info_eleven_times(self):
"""BMC info provides expected results 11 times in a row"""
for i in range(0, 11):
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
|
import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
Add a test for bmc info working eleven times in a row
This is specifically to check for SW-732, where results stop after 10
bmc info requests.import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
def test_bmc_info_eleven_times(self):
"""BMC info provides expected results 11 times in a row"""
for i in range(0, 11):
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
|
<commit_before>import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
<commit_msg>Add a test for bmc info working eleven times in a row
This is specifically to check for SW-732, where results stop after 10
bmc info requests.<commit_after>import sys, unittest
from singlesystemtest import SingleSystemTest
class TestBmcInfo(SingleSystemTest):
def test_bmc_info(self):
"""BMC info provides expected results"""
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
def test_bmc_info_eleven_times(self):
"""BMC info provides expected results 11 times in a row"""
for i in range(0, 11):
info = self.bmc.info()
check_items = self.get_checks()['BMCInfo']
for item,expected in check_items.iteritems():
self.assertEqual(expected, getattr(info, item))
tests = [TestBmcInfo]
if __name__ == '__main__':
for test in tests:
test.system = sys.argv[1]
suite = unittest.TestLoader().loadTestsFromTestCase(test)
unittest.TextTestRunner(verbosity=5).run(suite)
|
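The new test in this record simply repeats the existing assertion eleven times, because eleven crosses the boundary at which SW-732 made responses stop. As a self-contained illustration of that style of regression test, here is a sketch against a fake BMC that reproduces the ten-request limit; FakeBmc is invented for the example, the real fixture talks to hardware. Run as written, the test fails on the eleventh call, which is exactly the behaviour the real test exists to catch.
import unittest
class FakeBmc(object):
    # Invented stand-in that mimics the SW-732 failure mode:
    # it stops answering after ten info requests.
    def __init__(self):
        self.calls = 0
    def info(self):
        self.calls += 1
        if self.calls > 10:
            raise RuntimeError('no response after 10 requests')
        return {'device_id': '32'}
class TestRepeatedInfo(unittest.TestCase):
    def test_info_eleven_times(self):
        bmc = FakeBmc()
        for _ in range(11):  # eleven deliberately crosses the limit
            self.assertEqual('32', bmc.info()['device_id'])
if __name__ == '__main__':
    unittest.main()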
7346103a36d69d1f27bc064843afa8c18d201d2b
|
go/apps/bulk_message/definition.py
|
go/apps/bulk_message/definition.py
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
Change send bulk message display verb to 'Send message'.
|
Change send bulk message display verb to 'Send message'.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
Change send bulk message display verb to 'Send message'.
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
<commit_before>from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
<commit_msg>Change send bulk message display verb to 'Send message'.<commit_after>
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
Change send bulk message display verb to 'Send message'.from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
<commit_before>from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
<commit_msg>Change send bulk message display verb to 'Send message'.<commit_after>from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=self._conv.delivery_class,
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
fc18f86964e170c48632c614c86a0d26c9fbdd41
|
tests/test_load_module_from_file_location.py
|
tests/test_load_module_from_file_location.py
|
from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static/app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(
loaded_module_from_file_location,
):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
|
from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static" / "app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(loaded_module_from_file_location,):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
|
Resolve broken test in appveyor
|
Resolve broken test in appveyor
|
Python
|
mit
|
channelcat/sanic,channelcat/sanic,ashleysommer/sanic,channelcat/sanic,ashleysommer/sanic,ashleysommer/sanic,channelcat/sanic
|
from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static/app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(
loaded_module_from_file_location,
):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
Resolve broken test in appveyor
|
from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static" / "app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(loaded_module_from_file_location,):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
|
<commit_before>from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static/app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(
loaded_module_from_file_location,
):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
<commit_msg>Resolve broken test in appveyor<commit_after>
|
from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static" / "app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(loaded_module_from_file_location,):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
|
from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static/app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(
loaded_module_from_file_location,
):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
Resolve broken test in appveyorfrom pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static" / "app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(loaded_module_from_file_location,):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
|
<commit_before>from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static/app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(
loaded_module_from_file_location,
):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
<commit_msg>Resolve broken test in appveyor<commit_after>from pathlib import Path
from types import ModuleType
import pytest
from sanic.exceptions import LoadFileException
from sanic.utils import load_module_from_file_location
@pytest.fixture
def loaded_module_from_file_location():
return load_module_from_file_location(
str(Path(__file__).parent / "static" / "app_test_config.py")
)
@pytest.mark.dependency(name="test_load_module_from_file_location")
def test_load_module_from_file_location(loaded_module_from_file_location):
assert isinstance(loaded_module_from_file_location, ModuleType)
@pytest.mark.dependency(depends=["test_load_module_from_file_location"])
def test_loaded_module_from_file_location_name(loaded_module_from_file_location,):
assert loaded_module_from_file_location.__name__ == "app_test_config"
def test_load_module_from_file_location_with_non_existing_env_variable():
with pytest.raises(
LoadFileException,
match="The following environment variables are not set: MuuMilk",
):
load_module_from_file_location("${MuuMilk}")
|
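The only functional change in this record is splitting "static/app_test_config.py" into two separate path components. The commit message does not say what broke on AppVeyor, but joining segment by segment is the separator-agnostic pathlib idiom, as this small sketch shows:
from pathlib import PurePosixPath, PureWindowsPath
# Joining one segment at a time lets each flavour pick its own separator.
print(PurePosixPath('tests') / 'static' / 'app_test_config.py')
# -> tests/static/app_test_config.py
print(PureWindowsPath('tests') / 'static' / 'app_test_config.py')
# -> tests\static\app_test_config.py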
bdcaaf4ab999c51a6633b7e72971d7594de0b66b
|
bin/clean_unused_headers.py
|
bin/clean_unused_headers.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = map(lambda x: find_group(HEADER_PATTERN, x), header_pkgs)
image_versions = map(lambda x: find_group(IMAGE_PATTERN, x), image_pkgs)
print(header_pkgs)
print(image_pkgs)
print(header_versions)
print(image_versions)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = dict(map(
lambda x: (find_group(HEADER_PATTERN, x), x),
header_pkgs))
image_versions = dict(map(
lambda x: (find_group(IMAGE_PATTERN, x), x),
image_pkgs))
results = []
for version, pkg in header_versions.items():
if version not in image_versions:
results.append(pkg)
print(' '.join(results))
if __name__ == "__main__":
main()
|
Add python script to find unused linux-headers packages
|
Add python script to find unused linux-headers packages
|
Python
|
apache-2.0
|
elleryq/oh-my-home,elleryq/oh-my-home,elleryq/oh-my-home
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = map(lambda x: find_group(HEADER_PATTERN, x), header_pkgs)
image_versions = map(lambda x: find_group(IMAGE_PATTERN, x), image_pkgs)
print(header_pkgs)
print(image_pkgs)
print(header_versions)
print(image_versions)
if __name__ == "__main__":
main()
Add python script to find unused linux-headers packages
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = dict(map(
lambda x: (find_group(HEADER_PATTERN, x), x),
header_pkgs))
image_versions = dict(map(
lambda x: (find_group(IMAGE_PATTERN, x), x),
image_pkgs))
results = []
for version, pkg in header_versions.items():
if version not in image_versions:
results.append(pkg)
print(' '.join(results))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = map(lambda x: find_group(HEADER_PATTERN, x), header_pkgs)
image_versions = map(lambda x: find_group(IMAGE_PATTERN, x), image_pkgs)
print(header_pkgs)
print(image_pkgs)
print(header_versions)
print(image_versions)
if __name__ == "__main__":
main()
<commit_msg>Add python script to find unused linux-headers packages<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = dict(map(
lambda x: (find_group(HEADER_PATTERN, x), x),
header_pkgs))
image_versions = dict(map(
lambda x: (find_group(IMAGE_PATTERN, x), x),
image_pkgs))
results = []
for version, pkg in header_versions.items():
if version not in image_versions:
results.append(pkg)
print(' '.join(results))
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = map(lambda x: find_group(HEADER_PATTERN, x), header_pkgs)
image_versions = map(lambda x: find_group(IMAGE_PATTERN, x), image_pkgs)
print(header_pkgs)
print(image_pkgs)
print(header_versions)
print(image_versions)
if __name__ == "__main__":
main()
Add python script to find unused linux-headers packages#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = dict(map(
lambda x: (find_group(HEADER_PATTERN, x), x),
header_pkgs))
image_versions = dict(map(
lambda x: (find_group(IMAGE_PATTERN, x), x),
image_pkgs))
results = []
for version, pkg in header_versions.items():
if version not in image_versions:
results.append(pkg)
print(' '.join(results))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = map(lambda x: find_group(HEADER_PATTERN, x), header_pkgs)
image_versions = map(lambda x: find_group(IMAGE_PATTERN, x), image_pkgs)
print(header_pkgs)
print(image_pkgs)
print(header_versions)
print(image_versions)
if __name__ == "__main__":
main()
<commit_msg>Add python script to find unused linux-headers packages<commit_after>#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import re
from subprocess import check_output
IMAGE_PATTERN = re.compile(
'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
HEADER_PATTERN = re.compile(
'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic')
def get_all_packages():
for line in check_output(['dpkg', '-l']).split('\n'):
if line.startswith('ii'):
# print(line.split(' '))
yield line.split()[1]
def find_group(pattern, text):
matched = pattern.match(text)
if matched:
return '{version}-{rev}'.format(
version=matched.group('version'),
rev=matched.group('rev'))
return None
def main():
packages = list(get_all_packages())
header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages)
image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages)
header_versions = dict(map(
lambda x: (find_group(HEADER_PATTERN, x), x),
header_pkgs))
image_versions = dict(map(
lambda x: (find_group(IMAGE_PATTERN, x), x),
image_pkgs))
results = []
for version, pkg in header_versions.items():
if version not in image_versions:
results.append(pkg)
print(' '.join(results))
if __name__ == "__main__":
main()
|
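The rewritten main() above builds version-to-package dicts for headers and images, then keeps header packages whose version has no matching image. The same matching logic in a compact Python 3 form, with made-up sample package names; the walrus operator needs Python 3.8 or later:
import re
HEADER = re.compile(r'linux-headers-(?P<v>[0-9.]+-[0-9]{2})-generic')
IMAGE = re.compile(r'linux-image-(?P<v>[0-9.]+-[0-9]{2})-generic')
packages = [
    'linux-image-4.4.0-21-generic',
    'linux-headers-4.4.0-21-generic',
    'linux-headers-4.4.0-18-generic',  # image already removed
]
headers = {m.group('v'): p for p in packages if (m := HEADER.match(p))}
images = {m.group('v') for p in packages if (m := IMAGE.match(p))}
removable = [pkg for ver, pkg in headers.items() if ver not in images]
print(' '.join(removable))  # linux-headers-4.4.0-18-generic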
5392626ef746cf52043494e7d1360fd373bdfe93
|
cort/core/util.py
|
cort/core/util.py
|
""" Utility functions. """
__author__ = 'smartschat'
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
|
""" Utility functions. """
__author__ = 'smartschat'
import os
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
def get_java_path():
if "JAVA_HOME" in os.environ:
return os.path.join(os.environ["JAVA_HOME"], "bin", "java")
return "java"
|
Read java path from environment variable if set
|
Read java path from environment variable if set
|
Python
|
mit
|
smartschat/cort,smartschat/cort,smartschat/cort,smartschat/cort,smartschat/cort
|
""" Utility functions. """
__author__ = 'smartschat'
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
Read java path from environment variable if set
|
""" Utility functions. """
__author__ = 'smartschat'
import os
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
def get_java_path():
if "JAVA_HOME" in os.environ:
return os.path.join(os.environ["JAVA_HOME"], "bin", "java")
return "java"
|
<commit_before>""" Utility functions. """
__author__ = 'smartschat'
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
<commit_msg>Read java path from environment variable if set<commit_after>
|
""" Utility functions. """
__author__ = 'smartschat'
import os
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
def get_java_path():
if "JAVA_HOME" in os.environ:
return os.path.join(os.environ["JAVA_HOME"], "bin", "java")
return "java"
|
""" Utility functions. """
__author__ = 'smartschat'
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
Read java path from environment variable if set""" Utility functions. """
__author__ = 'smartschat'
import os
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
def get_java_path():
if "JAVA_HOME" in os.environ:
return os.path.join(os.environ["JAVA_HOME"], "bin", "java")
return "java"
|
<commit_before>""" Utility functions. """
__author__ = 'smartschat'
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
<commit_msg>Read java path from environment variable if set<commit_after>""" Utility functions. """
__author__ = 'smartschat'
import os
def clean_via_pos(tokens, pos):
""" Clean a list of tokens according to their part-of-speech tags.
In particular, retain only tokens which do not have the part-of-speech tag
DT (determiner) or POS (possessive 's').
Args:
tokens (list(str)): A list of tokens.
pos (list(str)): A list of corresponding part-of-speech tags.
Returns:
list(str): The list of tokens which do not have part-of-speech tag
DT or POS.
"""
return [token for token, pos in zip(tokens, pos)
if pos not in ["DT", "POS"]]
def get_java_path():
if "JAVA_HOME" in os.environ:
return os.path.join(os.environ["JAVA_HOME"], "bin", "java")
return "java"
|
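One wrinkle in this record's get_java_path(): util.py never imports os, so the function as committed would raise NameError at call time; the new-contents copies above have import os added to make them runnable. With that import in place, the lookup behaves like this (the JAVA_HOME value is simulated for the example):
import os
def get_java_path():
    # Prefer an explicit JAVA_HOME; otherwise rely on java being on PATH.
    if 'JAVA_HOME' in os.environ:
        return os.path.join(os.environ['JAVA_HOME'], 'bin', 'java')
    return 'java'
os.environ['JAVA_HOME'] = '/opt/jdk'  # simulated environment variable
assert get_java_path() == os.path.join('/opt/jdk', 'bin', 'java')
del os.environ['JAVA_HOME']
assert get_java_path() == 'java'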
16d949e6ffe9a2bc76e8f76de57b6936f6d92226
|
core/tasks.py
|
core/tasks.py
|
import logging
class BaseTask(object):
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
|
import logging
class BaseTask(object):
task_options = {}
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
|
Add task_options attribute to BaseTask
|
Add task_options attribute to BaseTask
|
Python
|
bsd-3-clause
|
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI,cdcarter/CumulusCI,e02d96ec16/CumulusCI,Joble/CumulusCI,e02d96ec16/CumulusCI
|
import logging
class BaseTask(object):
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
Add task_options attribute to BaseTask
|
import logging
class BaseTask(object):
task_options = {}
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
|
<commit_before>import logging
class BaseTask(object):
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
<commit_msg>Add task_options attribute to BaseTask<commit_after>
|
import logging
class BaseTask(object):
task_options = {}
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
|
import logging
class BaseTask(object):
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
Add task_options attribute to BaseTaskimport logging
class BaseTask(object):
task_options = {}
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
|
<commit_before>import logging
class BaseTask(object):
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
<commit_msg>Add task_options attribute to BaseTask<commit_after>import logging
class BaseTask(object):
task_options = {}
def __init__(self, project_config, task_config):
self.project_config = project_config
self.task_config = task_config
self._init_logger()
self._init_task()
def _init_logger(self):
""" Initializes self.logger """
self.logger = logging.getLogger(self.__class__.__name__)
self.logger.setLevel(logging.DEBUG)
def _init_task(self):
""" A method that subclasses can override to implement dynamic logic for initializing the task """
pass
def __call__(self):
raise NotImplementedError('Subclasses should provide their own implementation')
|
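The one-line change above declares task_options = {} as a class-level default that subclasses are expected to shadow with their own option declarations. A sketch of that pattern, including the usual caveat about mutable class attributes; DeployTask is invented for the example:
class BaseTask(object):
    task_options = {}  # class-level default, shadowed by subclasses
class DeployTask(BaseTask):
    task_options = {'path': {'required': True}}
assert BaseTask.task_options == {}
assert DeployTask.task_options == {'path': {'required': True}}
# Caveat: a subclass that merely mutated the inherited dict in place,
# instead of assigning its own, would change it for every other class
# sharing the default, since the dict object itself is shared.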
67daf4140c17ce28b0ab45ddd2366968082de739
|
two_factor/auth_backends.py
|
two_factor/auth_backends.py
|
from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
return user if verification.verified_until > now() else None
|
from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
if verification.verified_until < now():
return None
        verification.last_used_at = now()
verification.save()
return user
|
Update last_used_at after successful computer verification
|
Update last_used_at after successful computer verification
|
Python
|
mit
|
mathspace/django-two-factor-auth,percipient/django-two-factor-auth,percipient/django-two-factor-auth,moreati/django-two-factor-auth,Bouke/django-two-factor-auth,moreati/django-two-factor-auth,mathspace/django-two-factor-auth,koleror/django-two-factor-auth,koleror/django-two-factor-auth,Bouke/django-two-factor-auth,fusionbox/django-two-factor-auth,fusionbox/django-two-factor-auth
|
from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
return user if verification.verified_until > now() else None
Update last_used_at after successful computer verification
|
from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
if verification.verified_until < now():
return None
        verification.last_used_at = now()
verification.save()
return user
|
<commit_before>from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
return user if verification.verified_until > now() else None
<commit_msg>Update last_used_at after successful computer verification<commit_after>
|
from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
if verification.verified_until < now():
return None
verification.last_used_at=now()
verification.save()
return user
|
from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
return user if verification.verified_until > now() else None
Update last_used_at after successful computer verificationfrom django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
if verification.verified_until < now():
return None
verification.last_used_at=now()
verification.save()
return user
|
<commit_before>from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
return user if verification.verified_until > now() else None
<commit_msg>Update last_used_at after successful computer verification<commit_after>from django.contrib.auth.backends import ModelBackend
from django.utils.timezone import now
from oath import accept_totp
class TokenBackend(ModelBackend):
def authenticate(self, user, token):
accepted, drift = accept_totp(key=user.token.seed, response=token)
return user if accepted else None
class VerifiedComputerBackend(ModelBackend):
def authenticate(self, user, computer_id):
verification = user.verifiedcomputer_set.get(pk=computer_id)
if verification.verified_until < now():
return None
verification.last_used_at=now()
verification.save()
return user
|
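The two-factor backend change above does two things: it rejects expired verifications and stamps last_used_at on success. The same check-then-touch pattern, stripped of Django models and timezone handling — naive datetimes and a stand-in class, a sketch only:

from datetime import datetime, timedelta

class Verification(object):
    def __init__(self, verified_until):
        self.verified_until = verified_until
        self.last_used_at = None

def authenticate(user, verification):
    now = datetime.now()
    if verification.verified_until < now:
        return None                      # expired: authentication fails
    verification.last_used_at = now      # record the successful use
    return user

v = Verification(verified_until=datetime.now() + timedelta(days=30))
assert authenticate('alice', v) == 'alice'
assert v.last_used_at is not None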
86d844680015d60043a404f0b021463027b89e43
|
pre_commit_hooks/check_merge_conflict.py
|
pre_commit_hooks/check_merge_conflict.py
|
from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
|
from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i + 1))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
|
Fix off by one error in line number enumeration
|
Fix off by one error in line number enumeration
|
Python
|
mit
|
dupuy/pre-commit-hooks,jordant/pre-commit-hooks,jordant/pre-commit-hooks,pre-commit/pre-commit-hooks,Coverfox/pre-commit-hooks,bgschiller/pre-commit-hooks,chriskuehl/pre-commit-hooks,Harwood/pre-commit-hooks,arahayrabedian/pre-commit-hooks
|
from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
Fix off by one error in line number enumeration
|
from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i + 1))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
|
<commit_before>from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
<commit_msg>Fix off by one error in line number enumeration<commit_after>
|
from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i + 1))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
|
from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
Fix off by one error in line number enumerationfrom __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i + 1))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
|
<commit_before>from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
<commit_msg>Fix off by one error in line number enumeration<commit_after>from __future__ import print_function
import argparse
import sys
CONFLICT_PATTERNS = [
'<<<<<<< ',
'=======',
'>>>>>>> '
]
WARNING_MSG = 'Merge conflict string "{0}" found in {1}:{2}'
def detect_merge_conflict(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*')
args = parser.parse_args(argv)
retcode = 0
for filename in args.filenames:
with open(filename) as inputfile:
for i, line in enumerate(inputfile):
for pattern in CONFLICT_PATTERNS:
if line.startswith(pattern):
print(WARNING_MSG.format(pattern, filename, i + 1))
retcode = 1
return retcode
if __name__ == '__main__':
sys.exit(detect_merge_conflict())
|
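The off-by-one fix above adds i + 1 because enumerate() counts from 0. Python's enumerate also accepts a start argument, which expresses the same intent without the arithmetic — an equivalent idiom, not the hook's actual code:

lines = ['ok line', '<<<<<<< HEAD', 'ok line']
for lineno, line in enumerate(lines, start=1):        # 1-based line numbers
    if line.startswith('<<<<<<< '):
        print('conflict marker at line %d' % lineno)  # prints: ... line 2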
e80cc896396b217a3e3a4f01294b50061faf68cd
|
cyder/cydhcp/range/forms.py
|
cyder/cydhcp/range/forms.py
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('network', 'ip_type', 'range_type', 'start_str', 'end_str',
'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled', 'name')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('name', 'network', 'ip_type', 'range_type', 'start_str',
'end_str', 'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
|
Put name first in range form
|
Put name first in range form
|
Python
|
bsd-3-clause
|
zeeman/cyder,zeeman/cyder,akeym/cyder,akeym/cyder,akeym/cyder,OSU-Net/cyder,OSU-Net/cyder,murrown/cyder,zeeman/cyder,OSU-Net/cyder,OSU-Net/cyder,murrown/cyder,akeym/cyder,zeeman/cyder,drkitty/cyder,murrown/cyder,murrown/cyder,drkitty/cyder,drkitty/cyder,drkitty/cyder
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('network', 'ip_type', 'range_type', 'start_str', 'end_str',
'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled', 'name')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
Put name first in range form
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('name', 'network', 'ip_type', 'range_type', 'start_str',
'end_str', 'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
|
<commit_before>from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('network', 'ip_type', 'range_type', 'start_str', 'end_str',
'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled', 'name')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
<commit_msg>Put name first in range form<commit_after>
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('name', 'network', 'ip_type', 'range_type', 'start_str',
'end_str', 'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
|
from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('network', 'ip_type', 'range_type', 'start_str', 'end_str',
'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled', 'name')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
Put name first in range formfrom django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('name', 'network', 'ip_type', 'range_type', 'start_str',
'end_str', 'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
|
<commit_before>from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('network', 'ip_type', 'range_type', 'start_str', 'end_str',
'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled', 'name')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
<commit_msg>Put name first in range form<commit_after>from django import forms
from cyder.base.eav.forms import get_eav_form
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.range.models import Range, RangeAV
from cyder.cydns.forms import ViewChoiceForm
class RangeForm(ViewChoiceForm, UsabilityFormMixin):
class Meta:
model = Range
exclude = ('start_upper', 'start_lower', 'end_upper', 'end_lower')
fields = ('name', 'network', 'ip_type', 'range_type', 'start_str',
'end_str', 'domain', 'is_reserved', 'allow', 'views',
'dhcpd_raw_include', 'dhcp_enabled')
widgets = {'views': forms.CheckboxSelectMultiple,
'range_type': forms.RadioSelect,
'ip_type': forms.RadioSelect}
exclude = 'range_usage'
def __init__(self, *args, **kwargs):
super(RangeForm, self).__init__(*args, **kwargs)
self.fields['dhcpd_raw_include'].label = "DHCP Config Extras"
self.fields['dhcpd_raw_include'].widget.attrs.update(
{'cols': '80',
'style': 'display: none;width: 680px'})
RangeAVForm = get_eav_form(RangeAV, Range)
|
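The range-form change above only reorders Meta.fields, which is what Django ModelForms use to decide field order; plain Forms order fields by declaration. A tiny self-contained check of that ordering behaviour, assuming Django is installed (the form and field names here are made up):

import django
from django.conf import settings

settings.configure()   # bare-bones settings so forms work outside a project
django.setup()

from django import forms

class DemoForm(forms.Form):
    name = forms.CharField()       # declared first, rendered first
    network = forms.CharField()

print(list(DemoForm().fields))     # ['name', 'network']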
f9aa61893ea0b7e98dc4e5b25cbf63c2fffde672
|
libs/googleapis.py
|
libs/googleapis.py
|
import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener response:', response)
return response['id']
|
import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener url:', url, '; response:', response)
return response['id']
|
Add url logging in google shortener
|
Add url logging in google shortener
|
Python
|
mit
|
sevazhidkov/leonard
|
import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener response:', response)
return response['id']
Add url logging in google shortener
|
import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener url:', url, '; response:', response)
return response['id']
|
<commit_before>import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener response:', response)
return response['id']
<commit_msg>Add url logging in google shortener<commit_after>
|
import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener url:', url, '; response:', response)
return response['id']
|
import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener response:', response)
return response['id']
Add url logging in google shortenerimport os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener url:', url, '; response:', response)
return response['id']
|
<commit_before>import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener response:', response)
return response['id']
<commit_msg>Add url logging in google shortener<commit_after>import os
import time
import json
import requests
def get_timezone(lat, long):
response = requests.get('https://maps.googleapis.com/maps/api/timezone/json', params={
'location': '{},{}'.format(lat, long),
'timestamp': int(time.time()),
'key': os.environ['GOOGLE_API_TOKEN']
}).json()
return response['timeZoneId']
def shorten_url(url):
response = requests.post(
'https://www.googleapis.com/urlshortener/v1/url?key={}'.format(os.environ['GOOGLE_API_TOKEN']),
data=json.dumps({'longUrl': url}), headers={'Content-Type': 'application/json'}
).json()
print('Google Shortener url:', url, '; response:', response)
return response['id']
|
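The shortener change above threads the input URL into a print-based trace. The stdlib logging module is the more conventional tool for this and formats its arguments lazily — an alternative sketch with the API response stubbed out (the real goo.gl URL Shortener API has since been retired):

import logging

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(message)s')
log = logging.getLogger('shortener')

def shorten_url(url):
    response = {'id': 'http://goo.gl/abc123'}    # stand-in for the API call
    log.debug('Google Shortener url: %s; response: %s', url, response)
    return response['id']

shorten_url('https://example.com/some/very/long/path')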
abb55757564cd957840ab584765239f924233295
|
pinax_theme_bootstrap/management/commands/copy_from_theme.py
|
pinax_theme_bootstrap/management/commands/copy_from_theme.py
|
import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print f.replace(base, dest)
copy(f, f.replace(base, dest))
|
import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print(f.replace(base, dest))
copy(f, f.replace(base, dest))
|
Fix print for python 3
|
Fix print for python 3
Fix print statement in copy_from_theme management command.
|
Python
|
mit
|
foraliving/foraliving,foraliving/foraliving,foraliving/foraliving
|
import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print f.replace(base, dest)
copy(f, f.replace(base, dest))
Fix print for python 3
Fix print statement in copy_from_theme management command.
|
import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print(f.replace(base, dest))
copy(f, f.replace(base, dest))
|
<commit_before>import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print f.replace(base, dest)
copy(f, f.replace(base, dest))
<commit_msg>Fix print for python 3
Fix print statement in copy_from_theme management command.<commit_after>
|
import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print(f.replace(base, dest))
copy(f, f.replace(base, dest))
|
import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print f.replace(base, dest)
copy(f, f.replace(base, dest))
Fix print for python 3
Fix print statement in copy_from_theme management command.import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print(f.replace(base, dest))
copy(f, f.replace(base, dest))
|
<commit_before>import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print f.replace(base, dest)
copy(f, f.replace(base, dest))
<commit_msg>Fix print for python 3
Fix print statement in copy_from_theme management command.<commit_after>import errno
import glob
import os
import shutil
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
def copy(src, dest):
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
print('Directory not copied. Error: %s' % e)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--path",
type="string",
dest="path",
help="a glob wildcard to copy templates from"
),
)
def handle(self, *args, **options):
path = options["path"]
base = os.path.join(os.path.dirname(__file__), "../../templates")
dest = os.path.join(settings.PACKAGE_ROOT, "templates")
for f in glob.glob(os.path.join(base, path)):
print(f.replace(base, dest))
copy(f, f.replace(base, dest))
|
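The print fix above swaps a Python 2 print statement for the call form, which parses on both major versions. For multi-argument prints, though, the parenthesised form only behaves identically under the __future__ import — on Python 2 without it, print('a', 'b') prints a tuple. A standalone illustration:

from __future__ import print_function   # makes print a function on Python 2

def report(src, dest):
    print('copying', src, '->', dest)    # same output on Python 2 and 3

report('templates/base.html', 'site/templates/base.html')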
bcb4d817551d584965f252b7cb0df34bd19cc972
|
utils/lit/lit/LitFormats.py
|
utils/lit/lit/LitFormats.py
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
Test commit (removed extra blank line)
|
Test commit (removed extra blank line)
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@98988 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/llvm,dslab-epfl/asap,llvm-mirror/llvm,dslab-epfl/asap,dslab-epfl/asap,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,chubbymaggie/asap,chubbymaggie/asap,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,chubbymaggie/asap,apple/swift-llvm,chubbymaggie/asap,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,chubbymaggie/asap,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,dslab-epfl/asap,llvm-mirror/llvm,apple/swift-llvm,dslab-epfl/asap,llvm-mirror/llvm,dslab-epfl/asap,llvm-mirror/llvm,chubbymaggie/asap,llvm-mirror/llvm,llvm-mirror/llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
Test commit (removed extra blank line)
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@98988 91177308-0d34-0410-b5e6-96231b3b80d8
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
<commit_before>from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
<commit_msg>Test commit (removed extra blank line)
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@98988 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
Test commit (removed extra blank line)
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@98988 91177308-0d34-0410-b5e6-96231b3b80d8from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
<commit_before>from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
<commit_msg>Test commit (removed extra blank line)
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@98988 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
93282e663a03c2a62fcf9731db3d152b3d2c32c7
|
test_publisher.py
|
test_publisher.py
|
import publisher
def from_html_file():
source_file = "~/Projects/markdown-publisher/source_test.md"
print publisher.get_html_from_file(source_file)
def from_html():
test_source = "# Test heading\n\n- test item 1\n- test item 2"
print publisher.get_html(test_source)
def from_html_to_pdf():
test_html = publisher.get_html(publisher.get_text_from_file("README.md"))
test_pdf_filename = "test.pdf"
print publisher.write_html_to_pdf(test_html, test_pdf_filename)
from_html_to_pdf()
|
import publisher
test_pdf_filename = "test/test.pdf"
test_css_filename = "test/test.css"
test_md_filename = "test/test.md"
test_html_filename = "test/test.html"
test_md = "# Test heading\n\n- test item 1\n- test item 2"
def from_html_file():
print publisher.md_to_html(publisher.from_file(test_md_filename))
def md_to_html():
print publisher.md_to_html(test_source)
def md_and_css_to_html():
html_source = publisher.md_and_css_to_html(publisher.from_file(test_md_filename),
publisher.from_file(test_css_filename))
print html_source
publisher.to_file(html_source, test_html_filename)
def from_md_file_to_pdf_file():
test_html = publisher.md_to_html(publisher.from_file("README.md"))
print publisher.html_to_pdf_file(test_html, test_pdf_filename, [test_css_filename])
md_and_css_to_html()
|
Add MD+CSS processing test to test module.
|
Add MD+CSS processing test to test module.
|
Python
|
mit
|
cpgillem/markdown_publisher,cpgillem/markdown_publisher
|
import publisher
def from_html_file():
source_file = "~/Projects/markdown-publisher/source_test.md"
print publisher.get_html_from_file(source_file)
def from_html():
test_source = "# Test heading\n\n- test item 1\n- test item 2"
print publisher.get_html(test_source)
def from_html_to_pdf():
test_html = publisher.get_html(publisher.get_text_from_file("README.md"))
test_pdf_filename = "test.pdf"
print publisher.write_html_to_pdf(test_html, test_pdf_filename)
from_html_to_pdf()
Add MD+CSS processing test to test module.
|
import publisher
test_pdf_filename = "test/test.pdf"
test_css_filename = "test/test.css"
test_md_filename = "test/test.md"
test_html_filename = "test/test.html"
test_md = "# Test heading\n\n- test item 1\n- test item 2"
def from_html_file():
print publisher.md_to_html(publisher.from_file(test_md_filename))
def md_to_html():
print publisher.md_to_html(test_source)
def md_and_css_to_html():
html_source = publisher.md_and_css_to_html(publisher.from_file(test_md_filename),
publisher.from_file(test_css_filename))
print html_source
publisher.to_file(html_source, test_html_filename)
def from_md_file_to_pdf_file():
test_html = publisher.md_to_html(publisher.from_file("README.md"))
print publisher.html_to_pdf_file(test_html, test_pdf_filename, [test_css_filename])
md_and_css_to_html()
|
<commit_before>import publisher
def from_html_file():
source_file = "~/Projects/markdown-publisher/source_test.md"
print publisher.get_html_from_file(source_file)
def from_html():
test_source = "# Test heading\n\n- test item 1\n- test item 2"
print publisher.get_html(test_source)
def from_html_to_pdf():
test_html = publisher.get_html(publisher.get_text_from_file("README.md"))
test_pdf_filename = "test.pdf"
print publisher.write_html_to_pdf(test_html, test_pdf_filename)
from_html_to_pdf()
<commit_msg>Add MD+CSS processing test to test module.<commit_after>
|
import publisher
test_pdf_filename = "test/test.pdf"
test_css_filename = "test/test.css"
test_md_filename = "test/test.md"
test_html_filename = "test/test.html"
test_md = "# Test heading\n\n- test item 1\n- test item 2"
def from_html_file():
print publisher.md_to_html(publisher.from_file(test_md_filename))
def md_to_html():
print publisher.md_to_html(test_source)
def md_and_css_to_html():
html_source = publisher.md_and_css_to_html(publisher.from_file(test_md_filename),
publisher.from_file(test_css_filename))
print html_source
publisher.to_file(html_source, test_html_filename)
def from_md_file_to_pdf_file():
test_html = publisher.md_to_html(publisher.from_file("README.md"))
print publisher.html_to_pdf_file(test_html, test_pdf_filename, [test_css_filename])
md_and_css_to_html()
|
import publisher
def from_html_file():
source_file = "~/Projects/markdown-publisher/source_test.md"
print publisher.get_html_from_file(source_file)
def from_html():
test_source = "# Test heading\n\n- test item 1\n- test item 2"
print publisher.get_html(test_source)
def from_html_to_pdf():
test_html = publisher.get_html(publisher.get_text_from_file("README.md"))
test_pdf_filename = "test.pdf"
print publisher.write_html_to_pdf(test_html, test_pdf_filename)
from_html_to_pdf()
Add MD+CSS processing test to test module.import publisher
test_pdf_filename = "test/test.pdf"
test_css_filename = "test/test.css"
test_md_filename = "test/test.md"
test_html_filename = "test/test.html"
test_md = "# Test heading\n\n- test item 1\n- test item 2"
def from_html_file():
print publisher.md_to_html(publisher.from_file(test_md_filename))
def md_to_html():
print publisher.md_to_html(test_source)
def md_and_css_to_html():
html_source = publisher.md_and_css_to_html(publisher.from_file(test_md_filename),
publisher.from_file(test_css_filename))
print html_source
publisher.to_file(html_source, test_html_filename)
def from_md_file_to_pdf_file():
test_html = publisher.md_to_html(publisher.from_file("README.md"))
print publisher.html_to_pdf_file(test_html, test_pdf_filename, [test_css_filename])
md_and_css_to_html()
|
<commit_before>import publisher
def from_html_file():
source_file = "~/Projects/markdown-publisher/source_test.md"
print publisher.get_html_from_file(source_file)
def from_html():
test_source = "# Test heading\n\n- test item 1\n- test item 2"
print publisher.get_html(test_source)
def from_html_to_pdf():
test_html = publisher.get_html(publisher.get_text_from_file("README.md"))
test_pdf_filename = "test.pdf"
print publisher.write_html_to_pdf(test_html, test_pdf_filename)
from_html_to_pdf()
<commit_msg>Add MD+CSS processing test to test module.<commit_after>import publisher
test_pdf_filename = "test/test.pdf"
test_css_filename = "test/test.css"
test_md_filename = "test/test.md"
test_html_filename = "test/test.html"
test_md = "# Test heading\n\n- test item 1\n- test item 2"
def from_html_file():
print publisher.md_to_html(publisher.from_file(test_md_filename))
def md_to_html():
print publisher.md_to_html(test_source)
def md_and_css_to_html():
html_source = publisher.md_and_css_to_html(publisher.from_file(test_md_filename),
publisher.from_file(test_css_filename))
print html_source
publisher.to_file(html_source, test_html_filename)
def from_md_file_to_pdf_file():
test_html = publisher.md_to_html(publisher.from_file("README.md"))
print publisher.html_to_pdf_file(test_html, test_pdf_filename, [test_css_filename])
md_and_css_to_html()
|
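One thing worth flagging in the updated test module above: md_to_html() still references the old test_source name (renamed to test_md), so calling it would raise NameError. A self-contained, assertion-based version of that test, using the markdown package directly as a stand-in for publisher.md_to_html, which this record does not define:

import markdown

test_md = "# Test heading\n\n- test item 1\n- test item 2"

def test_md_to_html():
    html = markdown.markdown(test_md)        # stand-in for the publisher call
    assert '<h1>' in html                    # heading rendered
    assert '<li>test item 1</li>' in html    # list items rendered

test_md_to_html()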
95eb8e4b21b1531801fd69b13e4414c6dfb65563
|
esios/__init__.py
|
esios/__init__.py
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from service import Esios
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from .service import Esios
|
Make relative imports py2/py3 compat
|
Make relative imports py2/py3 compat
|
Python
|
mit
|
gisce/esios
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from service import Esios
Make relative imports py2/py3 compat
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from .service import Esios
|
<commit_before>try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from service import Esios
<commit_msg>Make relative imports py2/py3 compat<commit_after>
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from .service import Esios
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from service import Esios
Make relative imports py2/py3 compattry:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from .service import Esios
|
<commit_before>try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from service import Esios
<commit_msg>Make relative imports py2/py3 compat<commit_after>try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
from .service import Esios
|
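The esios fix above matters because `from service import Esios` inside a package is an implicit relative import: Python 2 resolves it against the package, while Python 3 removed that behaviour and raises ImportError. The explicit dot form works on both (Python 2.5+), and Python 3's strict semantics can be opted into on Python 2 as well:

from __future__ import absolute_import   # Python 2: disable implicit relative imports

# inside the package's __init__.py, the portable spelling is then:
# from .service import Esios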
1b6c49c6d74dcd31c8a0e51f82932866bc99adc2
|
setup_rouge.py
|
setup_rouge.py
|
#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
|
#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
if 'HOME' not in os.environ:
home = os.environ['HOMEPATH']
else:
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
|
Fix bug: KeyError: 'HOME' in Windows.
|
Fix bug: KeyError: 'HOME' in Windows.
|
Python
|
mit
|
pltrdy/files2rouge,pltrdy/files2rouge
|
#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
Fix bug: KeyError: 'HOME' in Windows.
|
#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
if 'HOME' not in os.environ:
home = os.environ['HOMEPATH']
else:
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
|
<commit_before>#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
<commit_msg>Fix bug: KeyError: 'HOME' in Windows.<commit_after>
|
#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
if 'HOME' not in os.environ:
home = os.environ['HOMEPATH']
else:
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
|
#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
Fix bug: KeyError: 'HOME' in Windows.#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
if 'HOME' not in os.environ:
home = os.environ['HOMEPATH']
else:
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
|
<commit_before>#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
<commit_msg>Fix bug: KeyError: 'HOME' in Windows.<commit_after>#!/usr/bin/env python
"""
Utility to copy ROUGE script.
It has to be run before `setup.py`
"""
import os
import shutil
from files2rouge import settings
from six.moves import input
def copy_rouge():
if 'HOME' not in os.environ:
home = os.environ['HOMEPATH']
else:
home = os.environ['HOME']
src_rouge_root = "./files2rouge/RELEASE-1.5.5/"
default_root = os.path.join(home, '.files2rouge/')
print("files2rouge uses scripts and tools that will not be stored with "
"the python package")
path = input(
"where do you want to save it? [default: %s]" % default_root)
if path == "":
path = default_root
rouge_data = os.path.join(path, "data")
rouge_path = os.path.join(path, "ROUGE-1.5.5.pl")
print("Copying '%s' to '%s'" % (src_rouge_root, path))
shutil.copytree(src_rouge_root, path)
return {"ROUGE_path": rouge_path, "ROUGE_data": rouge_data}
conf_path = "./files2rouge/settings.json"
s = settings.Settings(path=conf_path)
data = copy_rouge()
s._generate(data)
|
77b0ac8e4230663e0c0394366185ad32fb8ff6ba
|
configurator/__init__.py
|
configurator/__init__.py
|
"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
output = output.decode("utf-8").strip()
version = output[:output.rfind("-")]
return version
__version__ = _get_version()
|
"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
if version.rfind("-") >= 0:
version = version[:version.rfind("-")]
return version
__version__ = _get_version()
|
Fix _get_version for tagged releases
|
Fix _get_version for tagged releases
|
Python
|
apache-2.0
|
yasserglez/configurator,yasserglez/configurator
|
"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
output = output.decode("utf-8").strip()
version = output[:output.rfind("-")]
return version
__version__ = _get_version()
Fix _get_version for tagged releases
|
"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
if version.rfind("-") >= 0:
version = version[:version.rfind("-")]
return version
__version__ = _get_version()
|
<commit_before>"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
output = output.decode("utf-8").strip()
version = output[:output.rfind("-")]
return version
__version__ = _get_version()
<commit_msg>Fix _get_version for tagged releases<commit_after>
|
"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
if version.rfind("-") >= 0:
version = version[:version.rfind("-")]
return version
__version__ = _get_version()
|
"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
output = output.decode("utf-8").strip()
version = output[:output.rfind("-")]
return version
__version__ = _get_version()
Fix _get_version for tagged releases"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
if version.rfind("-") >= 0:
version = version[:version.rfind("-")]
return version
__version__ = _get_version()
|
<commit_before>"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
output = output.decode("utf-8").strip()
version = output[:output.rfind("-")]
return version
__version__ = _get_version()
<commit_msg>Fix _get_version for tagged releases<commit_after>"""Adaptive configuration dialogs.
Attributes:
__version__: The current version string.
"""
import os
import subprocess
def _get_version(version=None): # overwritten by setup.py
if version is None:
pkg_dir = os.path.dirname(__file__)
src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir))
git_dir = os.path.join(src_dir, ".git")
git_args = ("git", "--work-tree", src_dir, "--git-dir",
git_dir, "describe", "--tags")
output = subprocess.check_output(git_args)
version = output.decode("utf-8").strip()
if version.rfind("-") >= 0:
version = version[:version.rfind("-")]
return version
__version__ = _get_version()
|
5a03cd340e5dc8a796c7d430128f0e22be17333e
|
qiime/sdk/__init__.py
|
qiime/sdk/__init__.py
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
# Various URLs
CITATION = 'http://www.ncbi.nlm.nih.gov/pubmed/20383131'
HELP_URL = 'http://2.qiime.org'
CONDA_CHANNEL = 'https://anaconda.org/qiime2'
|
Add helper URLs to qiime.sdk
|
ENH: Add helper URLs to qiime.sdk
Adds citation url, help page, and conda channel URLs to qiime.sdk
|
Python
|
bsd-3-clause
|
biocore/qiime2,thermokarst/qiime2,ebolyen/qiime2,jakereps/qiime2,qiime2/qiime2,qiime2/qiime2,nervous-laughter/qiime2,biocore/qiime2,thermokarst/qiime2,jairideout/qiime2,jakereps/qiime2
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
ENH: Add helper URLs to qiime.sdk
Adds citation url, help page, and conda channel URLs to qiime.sdk
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
# Various URLs
CITATION = 'http://www.ncbi.nlm.nih.gov/pubmed/20383131'
HELP_URL = 'http://2.qiime.org'
CONDA_CHANNEL = 'https://anaconda.org/qiime2'
|
<commit_before># ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
<commit_msg>ENH: Add helper URLs to qiime.sdk
Adds citation url, help page, and conda channel URLs to qiime.sdk<commit_after>
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
# Various URLs
CITATION = 'http://www.ncbi.nlm.nih.gov/pubmed/20383131'
HELP_URL = 'http://2.qiime.org'
CONDA_CHANNEL = 'https://anaconda.org/qiime2'
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
ENH: Add helper URLs to qiime.sdk
Adds citation url, help page, and conda channel URLs to qiime.sdk# ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
# Various URLs
CITATION = 'http://www.ncbi.nlm.nih.gov/pubmed/20383131'
HELP_URL = 'http://2.qiime.org'
CONDA_CHANNEL = 'https://anaconda.org/qiime2'
|
<commit_before># ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
<commit_msg>ENH: Add helper URLs to qiime.sdk
Adds citation url, help page, and conda channel URLs to qiime.sdk<commit_after># ----------------------------------------------------------------------------
# Copyright (c) 2016--, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from .method import Method
from .plugin_manager import PluginManager
from .provenance import Provenance
from .visualizer import Visualizer
from .result import Result, Artifact, Visualization
from ..core.util import parse_type
__all__ = ['Result', 'Artifact', 'Visualization', 'Method', 'Visualizer',
'PluginManager', 'Provenance', 'parse_type']
# Various URLs
CITATION = 'http://www.ncbi.nlm.nih.gov/pubmed/20383131'
HELP_URL = 'http://2.qiime.org'
CONDA_CHANNEL = 'https://anaconda.org/qiime2'
|
b4814d2f86a3b86ca4c7c02bee0c255275308b9a
|
tests/conftest.py
|
tests/conftest.py
|
import os
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
|
import os
import subprocess
import sys
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CHAINER_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
|
Print installed packages in pytest
|
Print installed packages in pytest
|
Python
|
mit
|
pfnet/chainer,hvy/chainer,chainer/chainer,chainer/chainer,hvy/chainer,hvy/chainer,hvy/chainer,chainer/chainer,chainer/chainer
|
import os
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
Print installed packages in pytest
|
import os
import subprocess
import sys
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CHAINER_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
|
<commit_before>import os
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
<commit_msg>Print installed packages in pytest<commit_after>
|
import os
import subprocess
import sys
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CHAINER_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
|
import os
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
Print installed packages in pytestimport os
import subprocess
import sys
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CHAINER_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
|
<commit_before>import os
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
<commit_msg>Print installed packages in pytest<commit_after>import os
import subprocess
import sys
from chainer import testing
from chainer.testing import parameterized
_pairwise_parameterize = (
os.environ.get('CHAINER_TEST_PAIRWISE_PARAMETERIZATION', 'never'))
assert _pairwise_parameterize in ('never', 'always')
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CHAINER_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
def pytest_collection(session):
# Perform pairwise testing.
# TODO(kataoka): This is a tentative fix. Discuss its public interface.
if _pairwise_parameterize == 'always':
pairwise_product_dict = parameterized._pairwise_product_dict
testing.product_dict = pairwise_product_dict
parameterized.product_dict = pairwise_product_dict
def pytest_collection_finish(session):
if _pairwise_parameterize == 'always':
product_dict = parameterized._product_dict_orig
testing.product_dict = product_dict
parameterized.product_dict = product_dict
|