commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3310d8a83871fc0644045295db60cb3bbfe7f141 | tests/_support/empty_subcollection.py | tests/_support/empty_subcollection.py | from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, subcollection=Collection())
| from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, Collection('subcollection'))
| Fix a dumb test fixture mistake | Fix a dumb test fixture mistake
| Python | bsd-2-clause | pyinvoke/invoke,pyinvoke/invoke | from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, subcollection=Collection())
Fix a dumb test fixture mistake | from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, Collection('subcollection'))
| <commit_before>from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, subcollection=Collection())
<commit_msg>Fix a dumb test fixture mistake<commit_after> | from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, Collection('subcollection'))
| from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, subcollection=Collection())
Fix a dumb test fixture mistakefrom invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, Collection('subcollection'))
| <commit_before>from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, subcollection=Collection())
<commit_msg>Fix a dumb test fixture mistake<commit_after>from invoke import task, Collection
@task
def dummy(c):
pass
ns = Collection(dummy, Collection('subcollection'))
|
7ecec2d2b516d9ae22a3a0f652424045d547d811 | test_settings.py | test_settings.py | DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'object_tools',
'object_tools.tests',
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'object_tools',
'django.contrib.admin',
'object_tools.tests'
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| Put object_tools in the correct order in settings | Put object_tools in the correct order in settings
| Python | bsd-3-clause | felixxm/django-object-tools,praekelt/django-object-tools,shubhamdipt/django-object-tools,shubhamdipt/django-object-tools,praekelt/django-object-tools,sky-chen/django-object-tools,felixxm/django-object-tools,sky-chen/django-object-tools | DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'object_tools',
'object_tools.tests',
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
Put object_tools in the correct order in settings | DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'object_tools',
'django.contrib.admin',
'object_tools.tests'
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| <commit_before>DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'object_tools',
'object_tools.tests',
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
<commit_msg>Put object_tools in the correct order in settings<commit_after> | DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'object_tools',
'django.contrib.admin',
'object_tools.tests'
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'object_tools',
'object_tools.tests',
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
Put object_tools in the correct order in settingsDEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'object_tools',
'django.contrib.admin',
'object_tools.tests'
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| <commit_before>DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'object_tools',
'object_tools.tests',
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
<commit_msg>Put object_tools in the correct order in settings<commit_after>DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'object_tools',
'django.contrib.admin',
'object_tools.tests'
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
|
35296b1c87a86a87fbcf317e26a497fc91c287c7 | lexos/receivers/kmeans_receiver.py | lexos/receivers/kmeans_receiver.py | from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
k_value = int(self._front_end_data['nclusters'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input, use the default k value.
if k_value == '':
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
| from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input from front-end, use the default k value.
try:
k_value = int(self._front_end_data['nclusters'])
except ValueError:
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
| Update receiver to catch value error | Update receiver to catch value error
| Python | mit | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos | from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
k_value = int(self._front_end_data['nclusters'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input, use the default k value.
if k_value == '':
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
Update receiver to catch value error | from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input from front-end, use the default k value.
try:
k_value = int(self._front_end_data['nclusters'])
except ValueError:
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
| <commit_before>from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
k_value = int(self._front_end_data['nclusters'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input, use the default k value.
if k_value == '':
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
<commit_msg>Update receiver to catch value error<commit_after> | from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input from front-end, use the default k value.
try:
k_value = int(self._front_end_data['nclusters'])
except ValueError:
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
| from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
k_value = int(self._front_end_data['nclusters'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input, use the default k value.
if k_value == '':
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
Update receiver to catch value errorfrom typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input from front-end, use the default k value.
try:
k_value = int(self._front_end_data['nclusters'])
except ValueError:
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
| <commit_before>from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
k_value = int(self._front_end_data['nclusters'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input, use the default k value.
if k_value == '':
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
<commit_msg>Update receiver to catch value error<commit_after>from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input from front-end, use the default k value.
try:
k_value = int(self._front_end_data['nclusters'])
except ValueError:
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
|
8f280cece4d59e36ebfeb5486f25c7ac92718c13 | third_problem.py | third_problem.py | letters = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
phrase = phrase.replace(' ', '')
for char in phrase:
if char in letters:
output += char
else:
vowels += char
print(output)
print(vowels) | not_vowel = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
# Remove sapces
phrase = phrase.replace(' ', '')
for char in phrase:
if char in not_vowel:
output += char # Add non vowel to output
else:
vowels += char # Add vowels to vowels
print(output)
print(vowels)
| Clean it up a bit | Clean it up a bit | Python | mit | DoublePlusGood23/lc-president-challenge | letters = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
phrase = phrase.replace(' ', '')
for char in phrase:
if char in letters:
output += char
else:
vowels += char
print(output)
print(vowels)Clean it up a bit | not_vowel = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
# Remove sapces
phrase = phrase.replace(' ', '')
for char in phrase:
if char in not_vowel:
output += char # Add non vowel to output
else:
vowels += char # Add vowels to vowels
print(output)
print(vowels)
| <commit_before>letters = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
phrase = phrase.replace(' ', '')
for char in phrase:
if char in letters:
output += char
else:
vowels += char
print(output)
print(vowels)<commit_msg>Clean it up a bit<commit_after> | not_vowel = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
# Remove sapces
phrase = phrase.replace(' ', '')
for char in phrase:
if char in not_vowel:
output += char # Add non vowel to output
else:
vowels += char # Add vowels to vowels
print(output)
print(vowels)
| letters = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
phrase = phrase.replace(' ', '')
for char in phrase:
if char in letters:
output += char
else:
vowels += char
print(output)
print(vowels)Clean it up a bitnot_vowel = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
# Remove sapces
phrase = phrase.replace(' ', '')
for char in phrase:
if char in not_vowel:
output += char # Add non vowel to output
else:
vowels += char # Add vowels to vowels
print(output)
print(vowels)
| <commit_before>letters = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
phrase = phrase.replace(' ', '')
for char in phrase:
if char in letters:
output += char
else:
vowels += char
print(output)
print(vowels)<commit_msg>Clean it up a bit<commit_after>not_vowel = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
# Remove sapces
phrase = phrase.replace(' ', '')
for char in phrase:
if char in not_vowel:
output += char # Add non vowel to output
else:
vowels += char # Add vowels to vowels
print(output)
print(vowels)
|
b64e7714e581cfc0c0a0d0f055b22c5edca27e24 | susumutakuan.py | susumutakuan.py | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
def sigterm_handler(signum, frame):
print("Logging out...")
client.logout()
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
async def sigterm_handler(signum, frame):
print("Logging out...")
raise KeyboardInterrupt
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | Raise KeyboardInterrupt to allow the run to handle logout | Raise KeyboardInterrupt to allow the run to handle logout
| Python | mit | gryffon/SusumuTakuan,gryffon/SusumuTakuan | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
def sigterm_handler(signum, frame):
print("Logging out...")
client.logout()
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN)Raise KeyboardInterrupt to allow the run to handle logout | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
async def sigterm_handler(signum, frame):
print("Logging out...")
raise KeyboardInterrupt
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | <commit_before>import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
def sigterm_handler(signum, frame):
print("Logging out...")
client.logout()
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN)<commit_msg>Raise KeyboardInterrupt to allow the run to handle logout<commit_after> | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
async def sigterm_handler(signum, frame):
print("Logging out...")
raise KeyboardInterrupt
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
def sigterm_handler(signum, frame):
print("Logging out...")
client.logout()
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN)Raise KeyboardInterrupt to allow the run to handle logoutimport discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
async def sigterm_handler(signum, frame):
print("Logging out...")
raise KeyboardInterrupt
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | <commit_before>import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
def sigterm_handler(signum, frame):
print("Logging out...")
client.logout()
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN)<commit_msg>Raise KeyboardInterrupt to allow the run to handle logout<commit_after>import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
async def sigterm_handler(signum, frame):
print("Logging out...")
raise KeyboardInterrupt
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) |
fb5ad293c34387b1ab7b7b7df3aed3942fdd9282 | src/webapp/activities/forms.py | src/webapp/activities/forms.py | # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
| # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True, initial = 0,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
| Add default to max_places in proposal form | Add default to max_places in proposal form
| Python | agpl-3.0 | hirunatan/estelcon_web,hirunatan/estelcon_web,hirunatan/estelcon_web,hirunatan/estelcon_web | # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
Add default to max_places in proposal form | # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True, initial = 0,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
| <commit_before># -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
<commit_msg>Add default to max_places in proposal form<commit_after> | # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True, initial = 0,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
| # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
Add default to max_places in proposal form# -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True, initial = 0,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
| <commit_before># -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
<commit_msg>Add default to max_places in proposal form<commit_after># -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
title = forms.CharField(
max_length=100, required=True,
)
subtitle = forms.CharField(
required = False,
widget = forms.Textarea,
)
duration = forms.CharField(
max_length=50, required=True,
)
max_places = forms.IntegerField(
min_value = 0, required=True, initial = 0,
)
show_owners = forms.BooleanField(
initial = False, required = False,
)
requires_inscription = forms.BooleanField(
initial = False, required = False,
)
owners = forms.CharField(
required = False,
widget = forms.Textarea,
)
organizers = forms.CharField(
required = False,
widget = forms.Textarea,
)
text = forms.CharField(
required = False,
widget = forms.Textarea,
)
logistics = forms.CharField(
required = False,
widget = forms.Textarea,
)
notes_organization = forms.CharField(
required = False,
widget = forms.Textarea,
)
|
551dddbb80d512ec49d8a422b52c24e98c97b38c | tsparser/main.py | tsparser/main.py | from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
| from time import sleep
from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
sleep(0.01)
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
| Add waiting for new data to parse | Add waiting for new data to parse
| Python | mit | m4tx/techswarm-receiver | from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
Add waiting for new data to parse | from time import sleep
from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
sleep(0.01)
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
| <commit_before>from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
<commit_msg>Add waiting for new data to parse<commit_after> | from time import sleep
from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
sleep(0.01)
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
| from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
Add waiting for new data to parsefrom time import sleep
from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
sleep(0.01)
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
| <commit_before>from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
<commit_msg>Add waiting for new data to parse<commit_after>from time import sleep
from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
"""
Parse the file specified as input.
:param input_file: file to read input from. If None, then pipe specified
in config is used
:type input_file: file
"""
Sender(daemon=True).start()
if input_file is None:
input_file = open(config.PIPE_NAME, 'r')
parsers = _get_parsers()
while True:
line = input_file.readline()
if not line:
sleep(0.01)
continue
_parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
|
d23a68d464c62cdefb76dbe5855110374680ae61 | regulations/settings/dev.py | regulations/settings/dev.py | from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=regulations',
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| Add coverage metrics to python code | Add coverage metrics to python code
| Python | cc0-1.0 | willbarton/regulations-site,ascott1/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,grapesmoker/regulations-site,adderall/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,EricSchles/regulations-site,EricSchles/regulations-site,grapesmoker/regulations-site,eregs/regulations-site,adderall/regulations-site,eregs/regulations-site,eregs/regulations-site,willbarton/regulations-site,EricSchles/regulations-site,willbarton/regulations-site,jeremiak/regulations-site,18F/regulations-site,jeremiak/regulations-site,ascott1/regulations-site,EricSchles/regulations-site,willbarton/regulations-site,tadhg-ohiggins/regulations-site,adderall/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,jeremiak/regulations-site,ascott1/regulations-site,18F/regulations-site,grapesmoker/regulations-site,jeremiak/regulations-site,adderall/regulations-site | from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
Add coverage metrics to python code | from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=regulations',
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| <commit_before>from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
<commit_msg>Add coverage metrics to python code<commit_after> | from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=regulations',
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
Add coverage metrics to python codefrom .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=regulations',
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| <commit_before>from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
<commit_msg>Add coverage metrics to python code<commit_after>from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=regulations',
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
|
6d7e597ce216093d52ecdcb7db5c087dc6040bb1 | fullcalendar/conf.py | fullcalendar/conf.py | from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = object()
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = type('SettingsDummy', (), default)
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| Fix initiation of settings object | Fix initiation of settings object
| Python | mit | jonge-democraten/mezzanine-fullcalendar | from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = object()
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
Fix initiation of settings object | from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = type('SettingsDummy', (), default)
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| <commit_before>from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = object()
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
<commit_msg>Fix initiation of settings object<commit_after> | from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = type('SettingsDummy', (), default)
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = object()
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
Fix initiation of settings objectfrom datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = type('SettingsDummy', (), default)
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| <commit_before>from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = object()
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
<commit_msg>Fix initiation of settings object<commit_after>from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = type('SettingsDummy', (), default)
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
|
7674437d752be0791688533dd1409fa083672bb2 | genes/java/config.py | genes/java/config.py | #!/usr/bin/env python
def config():
return {
'is-oracle': True,
'version': 'oracle-java8',
}
| #!/usr/bin/env python
from collections import namedtuple
JavaConfig = namedtuple('JavaConfig', ['is_oracle', 'version'])
def config():
return JavaConfig(
is_oracle=True,
version='oracle-java8',
)
| Switch from dictionary to namedtuple | Switch from dictionary to namedtuple | Python | mit | hatchery/Genepool2,hatchery/genepool | #!/usr/bin/env python
def config():
return {
'is-oracle': True,
'version': 'oracle-java8',
}
Switch from dictionary to namedtuple | #!/usr/bin/env python
from collections import namedtuple
JavaConfig = namedtuple('JavaConfig', ['is_oracle', 'version'])
def config():
return JavaConfig(
is_oracle=True,
version='oracle-java8',
)
| <commit_before>#!/usr/bin/env python
def config():
return {
'is-oracle': True,
'version': 'oracle-java8',
}
<commit_msg>Switch from dictionary to namedtuple<commit_after> | #!/usr/bin/env python
from collections import namedtuple
JavaConfig = namedtuple('JavaConfig', ['is_oracle', 'version'])
def config():
return JavaConfig(
is_oracle=True,
version='oracle-java8',
)
| #!/usr/bin/env python
def config():
return {
'is-oracle': True,
'version': 'oracle-java8',
}
Switch from dictionary to namedtuple#!/usr/bin/env python
from collections import namedtuple
JavaConfig = namedtuple('JavaConfig', ['is_oracle', 'version'])
def config():
return JavaConfig(
is_oracle=True,
version='oracle-java8',
)
| <commit_before>#!/usr/bin/env python
def config():
return {
'is-oracle': True,
'version': 'oracle-java8',
}
<commit_msg>Switch from dictionary to namedtuple<commit_after>#!/usr/bin/env python
from collections import namedtuple
JavaConfig = namedtuple('JavaConfig', ['is_oracle', 'version'])
def config():
return JavaConfig(
is_oracle=True,
version='oracle-java8',
)
|
d5cb2a37ea77b15c5725d6ebf8e0ab79f3bea613 | flow_workflow/historian/service_interface.py | flow_workflow/historian/service_interface.py | import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)


class WorkflowHistorianServiceInterface(object):
    """Publishes workflow-historian update messages via a broker."""

    def __init__(self,
            broker=None,
            exchange=None,
            routing_key=None):
        # broker: object exposing publish(exchange, routing_key, message)
        self.broker = broker
        self.exchange = exchange
        self.routing_key = routing_key

    def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
        """Publish an UpdateMessage, skipping negative workflow_plan_ids.

        A negative workflow_plan_id marks an operation that should not be
        recorded, so no message is constructed for it.
        """
        if workflow_plan_id < 0:
            # ignore update (don't even make message)
            # BUG FIX: 'peration_id' was an undefined name (NameError).
            LOG.debug("Received negative workflow_plan_id:%s, "
                    "ignoring update (net_key=%s, operation_id=%s, name=%s,"
                    "workflow_plan_id=%s, kwargs=%s)",
                    workflow_plan_id, net_key, operation_id, name,
                    workflow_plan_id, kwargs)
        else:
            LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
                    "workflow_plan_id=%s, kwargs=%s)",
                    net_key, operation_id, name, workflow_plan_id, kwargs)
            # Forward every identifying field the caller supplied, not just
            # the net/operation keys, so the historian can record them.
            message = UpdateMessage(net_key=net_key, operation_id=operation_id,
                    name=name, workflow_plan_id=workflow_plan_id,
                    **kwargs)
            self.broker.publish(self.exchange, self.routing_key, message)
| import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)


class WorkflowHistorianServiceInterface(object):
    """Publishes workflow-historian update messages via a broker."""

    def __init__(self,
            broker=None,
            exchange=None,
            routing_key=None):
        # broker: object exposing publish(exchange, routing_key, message)
        self.broker = broker
        self.exchange = exchange
        self.routing_key = routing_key

    def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
        """Publish an UpdateMessage, skipping negative workflow_plan_ids.

        A negative workflow_plan_id marks an operation that should not be
        recorded, so no message is constructed for it.
        """
        if workflow_plan_id < 0:
            # ignore update (don't even make message)
            # BUG FIX: this branch still referenced the undefined name
            # 'peration_id', raising NameError instead of logging.
            LOG.debug("Received negative workflow_plan_id:%s, "
                    "ignoring update (net_key=%s, operation_id=%s, name=%s,"
                    "workflow_plan_id=%s, kwargs=%s)",
                    workflow_plan_id, net_key, operation_id, name,
                    workflow_plan_id, kwargs)
        else:
            LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
                    "workflow_plan_id=%s, kwargs=%s)",
                    net_key, operation_id, name, workflow_plan_id, kwargs)
            message = UpdateMessage(net_key=net_key, operation_id=operation_id,
                    name=name, workflow_plan_id=workflow_plan_id,
                    **kwargs)
            self.broker.publish(self.exchange, self.routing_key, message)
| Fix interface in historian service interface | Fix interface in historian service interface
| Python | agpl-3.0 | genome/flow-workflow,genome/flow-workflow,genome/flow-workflow | import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, peration_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
Fix interface in historian service interface | import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, operation_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
name=name, workflow_plan_id=workflow_plan_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
| <commit_before>import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, peration_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
<commit_msg>Fix interface in historian service interface<commit_after> | import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, operation_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
name=name, workflow_plan_id=workflow_plan_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
| import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, peration_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
Fix interface in historian service interfaceimport logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, operation_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
name=name, workflow_plan_id=workflow_plan_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
| <commit_before>import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, peration_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
<commit_msg>Fix interface in historian service interface<commit_after>import logging
from flow_workflow.historian.messages import UpdateMessage
LOG = logging.getLogger(__name__)
class WorkflowHistorianServiceInterface(object):
def __init__(self,
broker=None,
exchange=None,
routing_key=None):
self.broker = broker
self.exchange = exchange
self.routing_key = routing_key
def update(self, net_key, operation_id, name, workflow_plan_id, **kwargs):
if workflow_plan_id < 0:
# ignore update (don't even make message)
LOG.debug("Received negative workflow_plan_id:%s, "
"ignoring update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
workflow_plan_id, net_key, peration_id, name,
workflow_plan_id, kwargs)
else:
LOG.debug("Sending update (net_key=%s, operation_id=%s, name=%s,"
"workflow_plan_id=%s, kwargs=%s)",
net_key, operation_id, name, workflow_plan_id, kwargs)
message = UpdateMessage(net_key=net_key, operation_id=operation_id,
name=name, workflow_plan_id=workflow_plan_id,
**kwargs)
self.broker.publish(self.exchange, self.routing_key, message)
|
91a77b860387ebed146b9e4e604d007bfabf0b9e | lib/ansible/plugins/action/normal.py | lib/ansible/plugins/action/normal.py | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    """Action plugin that runs the task's module on the target as-is."""

    def run(self, tmp=None, task_vars=None):
        # A literal dict() default is evaluated once at def time and shared
        # across calls; build a fresh mapping per invocation instead.
        if task_vars is None:
            task_vars = dict()
        # Pass tmp by keyword so it cannot be mistaken for a different
        # positional parameter of _execute_module.
        results = self._execute_module(tmp=tmp, task_vars=task_vars)
        # Remove special fields from the result, which can only be set
        # internally by the executor engine. We do this only here in
        # the 'normal' action, as other action plugins may set this.
        for field in ('ansible_notify',):
            if field in results:
                results.pop(field)
        return results
| # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    """Action plugin that runs the task's module on the target as-is."""

    def run(self, tmp=None, task_vars=None):
        # A literal dict() default is evaluated once at def time and shared
        # across calls; build a fresh mapping per invocation instead.
        if task_vars is None:
            task_vars = dict()
        results = self._execute_module(tmp=tmp, task_vars=task_vars)
        # Remove special fields from the result, which can only be set
        # internally by the executor engine. We do this only here in
        # the 'normal' action, as other action plugins may set this.
        for field in ('ansible_notify',):
            if field in results:
                results.pop(field)
        return results
| Fix potential bug in parameter passing | Fix potential bug in parameter passing
| Python | mit | thaim/ansible,thaim/ansible | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
Fix potential bug in parameter passing | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp=tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
| <commit_before># (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
<commit_msg>Fix potential bug in parameter passing<commit_after> | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp=tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
| # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
Fix potential bug in parameter passing# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp=tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
| <commit_before># (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
<commit_msg>Fix potential bug in parameter passing<commit_after># (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
results = self._execute_module(tmp=tmp, task_vars=task_vars)
# Remove special fields from the result, which can only be set
# internally by the executor engine. We do this only here in
# the 'normal' action, as other action plugins may set this.
for field in ('ansible_notify',):
if field in results:
results.pop(field)
return results
|
e0db9a970c6ea778419cc1f20ca66adedffb7aae | utils/mwm.py | utils/mwm.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)


class MWM(object):
    """Wraps the generate_mwm.sh tool to convert a PBF extract to MWM."""
    name = 'mwm'
    description = 'maps.me MWM'
    cmd = Template('generate_mwm.sh $input')

    def __init__(self, input):
        """
        Initialize the MWM generation utility.

        Args:
            input: path to the source PBF; the .mwm is written alongside it
        """
        self.input = input
        self.output = os.path.splitext(input)[0] + '.mwm'

    def run(self):
        """Run generate_mwm.sh unless the output file already exists."""
        if self.is_complete:
            LOG.debug("Skipping MWM, file exists")
            return
        convert_cmd = self.cmd.safe_substitute({
            'input': self.input,
        })
        LOG.debug('Running: %s' % convert_cmd)
        # Scratch dir for the converter; TARGET tells it where to place
        # the finished .mwm file.
        tmpdir = tempfile.mkdtemp()
        env = os.environ.copy()
        env.update(MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
        try:
            # BUG FIX: check_call with stdout/stderr=PIPE is misuse — nothing
            # drains the pipes, so a chatty script can fill the OS pipe
            # buffer and deadlock. Let output pass through instead.
            subprocess.check_call(
                convert_cmd,
                env=env,
                shell=True,
                executable='/bin/bash')
            LOG.debug('generate_mwm.sh complete')
        finally:
            shutil.rmtree(tmpdir)

    @property
    def results(self):
        return [Artifact([self.output], self.name)]

    @property
    def is_complete(self):
        # An existing output file counts as a completed conversion.
        return os.path.isfile(self.output)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)


class MWM(object):
    """Converter that produces a maps.me MWM file from a PBF extract."""
    name = 'mwm'
    description = 'maps.me MWM'
    cmd = Template('generate_mwm.sh $input')

    def __init__(self, input):
        """Remember the source PBF path and derive the .mwm output path."""
        self.input = input
        base, _ = os.path.splitext(input)
        self.output = base + '.mwm'

    def run(self):
        """Invoke generate_mwm.sh, skipping the work if output exists."""
        if self.is_complete:
            LOG.debug("Skipping MWM, file exists")
            return
        command = self.cmd.safe_substitute({'input': self.input})
        LOG.debug('Running: %s' % command)
        scratch = tempfile.mkdtemp()
        child_env = os.environ.copy()
        child_env['HOME'] = scratch
        child_env['MWM_WRITABLE_DIR'] = scratch
        child_env['TARGET'] = os.path.dirname(self.output)
        try:
            subprocess.check_call(
                command,
                env=child_env,
                shell=True,
                executable='/bin/bash')
            LOG.debug('generate_mwm.sh complete')
        finally:
            shutil.rmtree(scratch)

    @property
    def results(self):
        return [Artifact([self.output], self.name)]

    @property
    def is_complete(self):
        return os.path.isfile(self.output)
| Set HOME, allow errors to pass through to stdout/stderr | Set HOME, allow errors to pass through to stdout/stderr
| Python | bsd-3-clause | hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
Set HOME, allow errors to pass through to stdout/stderr | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(HOME=tmpdir, MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash')
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
<commit_msg>Set HOME, allow errors to pass through to stdout/stderr<commit_after> | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(HOME=tmpdir, MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash')
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
Set HOME, allow errors to pass through to stdout/stderr# -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(HOME=tmpdir, MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash')
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
name = 'mwm'
description = 'maps.me MWM'
cmd = Template('generate_mwm.sh $input')
def __init__(self, input):
"""
Initialize the MWM generation utility.
Args:
pbf: the source PBF
"""
self.input = input
self.output = os.path.splitext(input)[0] + '.mwm'
def run(self):
if self.is_complete:
LOG.debug("Skipping MWM, file exists")
return
convert_cmd = self.cmd.safe_substitute({
'input': self.input,
})
LOG.debug('Running: %s' % convert_cmd)
tmpdir = tempfile.mkdtemp()
env = os.environ.copy()
env.update(MWM_WRITABLE_DIR=tmpdir, TARGET=os.path.dirname(self.output))
try:
subprocess.check_call(
convert_cmd,
env=env,
shell=True,
executable='/bin/bash',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
LOG.debug('generate_mwm.sh complete')
finally:
shutil.rmtree(tmpdir)
@property
def results(self):
return [Artifact([self.output], self.name)]
@property
def is_complete(self):
return os.path.isfile(self.output)
<commit_msg>Set HOME, allow errors to pass through to stdout/stderr<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import shutil
import subprocess
import tempfile
from string import Template
from .artifact import Artifact
LOG = logging.getLogger(__name__)
class MWM(object):
    """Wraps the maps.me ``generate_mwm.sh`` conversion script.

    Converts a source PBF extract into a ``.mwm`` file placed next to it.
    """

    name = 'mwm'
    description = 'maps.me MWM'
    cmd = Template('generate_mwm.sh $input')

    def __init__(self, input):
        """Remember the source PBF path and derive the .mwm output path."""
        self.input = input
        base, _ = os.path.splitext(input)
        self.output = base + '.mwm'

    def run(self):
        """Invoke generate_mwm.sh, skipping work if the output exists.

        A throwaway directory serves as HOME and the tool's writable dir;
        it is removed even when the conversion fails.
        """
        if self.is_complete:
            LOG.debug("Skipping MWM, file exists")
            return
        convert_cmd = self.cmd.safe_substitute({'input': self.input})
        LOG.debug('Running: %s' % convert_cmd)
        scratch = tempfile.mkdtemp()
        env = os.environ.copy()
        env.update(HOME=scratch,
                   MWM_WRITABLE_DIR=scratch,
                   TARGET=os.path.dirname(self.output))
        try:
            subprocess.check_call(
                convert_cmd,
                env=env,
                shell=True,
                executable='/bin/bash')
            LOG.debug('generate_mwm.sh complete')
        finally:
            shutil.rmtree(scratch)

    @property
    def results(self):
        """Artifacts produced by a successful run."""
        return [Artifact([self.output], self.name)]

    @property
    def is_complete(self):
        """True when the output MWM already exists on disk."""
        return os.path.isfile(self.output)
|
d0ce2b074ffd603c507069d8a5ab1189fad0ca56 | pywikibot/families/wikia_family.py | pywikibot/families/wikia_family.py | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| Update a version number from trunk r9016 | Update a version number from trunk r9016
https://mediawiki.org/wiki/Special:Code/pywikipedia/9040
| Python | mit | VcamX/pywikibot-core,npdoty/pywikibot,npdoty/pywikibot,emijrp/pywikibot-core,darthbhyrava/pywikibot-local,magul/pywikibot-core,magul/pywikibot-core,h4ck3rm1k3/pywikibot-core,trishnaguha/pywikibot-core,xZise/pywikibot-core,valhallasw/pywikibot-core,Darkdadaah/pywikibot-core,icyflame/batman,PersianWikipedia/pywikibot-core,hasteur/g13bot_tools_new,wikimedia/pywikibot-core,happy5214/pywikibot-core,Darkdadaah/pywikibot-core,smalyshev/pywikibot-core,happy5214/pywikibot-core,hasteur/g13bot_tools_new,wikimedia/pywikibot-core,TridevGuha/pywikibot-core,jayvdb/pywikibot-core,h4ck3rm1k3/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
Update a version number from trunk r9016
https://mediawiki.org/wiki/Special:Code/pywikipedia/9040 | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| <commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
<commit_msg>Update a version number from trunk r9016
https://mediawiki.org/wiki/Special:Code/pywikipedia/9040<commit_after> | # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| # -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
Update a version number from trunk r9016
https://mediawiki.org/wiki/Special:Code/pywikipedia/9040# -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.16.2"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
| <commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = u'wikia'
self.langs = {
u'wikia': None,
}
def hostname(self, code):
return u'www.wikia.com'
def version(self, code):
return "1.15.1"
def scriptpath(self, code):
return ''
def apipath(self, code):
return '/api.php'
<commit_msg>Update a version number from trunk r9016
https://mediawiki.org/wiki/Special:Code/pywikipedia/9040<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
import family
# The Wikia Search family
# user-config.py: usernames['wikia']['wikia'] = 'User name'
class Family(family.Family):
    """Family definition for the single-language www.wikia.com wiki."""

    def __init__(self):
        family.Family.__init__(self)
        self.name = u'wikia'
        self.langs = {u'wikia': None}

    def hostname(self, code):
        """Host serving every language code of this family."""
        return u'www.wikia.com'

    def version(self, code):
        """MediaWiki version run by the site."""
        return "1.16.2"

    def scriptpath(self, code):
        """Scripts live at the document root."""
        return ''

    def apipath(self, code):
        """Path to api.php relative to the script path."""
        return '/api.php'
|
1657e46cd5c2a81df4cbb73b292b0bf9072d5c51 | h2o-py/tests/testdir_tree/pyunit_tree_irf.py | h2o-py/tests/testdir_tree/pyunit_tree_irf.py | import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
| import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
# check all columns are categorical
assert set(cat_frame.types.values()) == set(['enum'])
iso_model = H2OIsolationForestEstimator(seed=42)
iso_model.train(training_frame=cat_frame)
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
| Fix test: make sure that Isolation Forest actually make a categorical split | Fix test: make sure that Isolation Forest actually make a categorical split
| Python | apache-2.0 | h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3 | import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
Fix test: make sure that Isolation Forest actually make a categorical split | import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
# check all columns are categorical
assert set(cat_frame.types.values()) == set(['enum'])
iso_model = H2OIsolationForestEstimator(seed=42)
iso_model.train(training_frame=cat_frame)
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
| <commit_before>import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
<commit_msg>Fix test: make sure that Isolation Forest actually make a categorical split<commit_after> | import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
# check all columns are categorical
assert set(cat_frame.types.values()) == set(['enum'])
iso_model = H2OIsolationForestEstimator(seed=42)
iso_model.train(training_frame=cat_frame)
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
| import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
Fix test: make sure that Isolation Forest actually make a categorical splitimport h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
cat_frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
# check all columns are categorical
assert set(cat_frame.types.values()) == set(['enum'])
iso_model = H2OIsolationForestEstimator(seed=42)
iso_model.train(training_frame=cat_frame)
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
| <commit_before>import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class = None):
assert tree is not None
assert len(tree) > 0
assert tree._tree_number == tree_number
assert tree._tree_class == tree_class
assert tree.root_node is not None
assert tree.left_children is not None
assert tree.right_children is not None
assert tree.thresholds is not None
assert tree.nas is not None
assert tree.descriptions is not None
assert tree.node_ids is not None
assert tree.model_id is not None
assert tree.levels is not None
assert tree.root_node.na_direction is not None
assert tree.root_node.id is not None
def irf_tree_Test():
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate.csv"))
prostate["RACE"] = prostate["RACE"].asfactor()
iso_model = H2OIsolationForestEstimator()
iso_model.train(training_frame = prostate, x = list(set(prostate.col_names) - set(["ID", "CAPSULE"])))
tree = H2OTree(iso_model, 5)
check_tree(tree, 5, None)
print(tree)
if __name__ == "__main__":
pyunit_utils.standalone_test(irf_tree_Test)
else:
irf_tree_Test()
<commit_msg>Fix test: make sure that Isolation Forest actually make a categorical split<commit_after>import h2o
from h2o.tree import H2OTree
from h2o.estimators import H2OIsolationForestEstimator
from tests import pyunit_utils
def check_tree(tree, tree_number, tree_class=None):
    """Sanity-check a fetched H2OTree: identity fields plus populated payload."""
    assert tree is not None
    assert len(tree) > 0
    assert tree._tree_number == tree_number
    assert tree._tree_class == tree_class
    # Every structural attribute of the fetched tree must be populated.
    for attr in ("root_node", "left_children", "right_children", "thresholds",
                 "nas", "descriptions", "node_ids", "model_id", "levels"):
        assert getattr(tree, attr) is not None
    assert tree.root_node.na_direction is not None
    assert tree.root_node.id is not None
def irf_tree_Test():
    """Fetch tree 5 from an Isolation Forest trained on all-categorical data."""
    frame = h2o.create_frame(cols=10, categorical_fraction=1, seed=42)
    # Every generated column must be categorical for this scenario.
    assert set(frame.types.values()) == set(['enum'])
    forest = H2OIsolationForestEstimator(seed=42)
    forest.train(training_frame=frame)
    fetched = H2OTree(forest, 5)
    check_tree(fetched, 5, None)
    print(fetched)
if __name__ == "__main__":
    # Executed as a script: run under the H2O pyunit standalone harness,
    # which handles cluster setup/teardown around the test function.
    pyunit_utils.standalone_test(irf_tree_Test)
else:
    # Imported by the test runner: invoke the test directly.
    irf_tree_Test()
|
fc7f51877b6b991ad5a25afb755dd7a35e91dfea | cla_backend/apps/legalaid/migrations/0022_default_contact_for_research_methods.py | cla_backend/apps/legalaid/migrations/0022_default_contact_for_research_methods.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.create(method=value, reference=uuid.uuid4()).save()
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.get_or_create(method=value, defaults={"reference": uuid.uuid4()})
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
| Use get_or_create to avoid duplicate objects | Use get_or_create to avoid duplicate objects
| Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.create(method=value, reference=uuid.uuid4()).save()
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
Use get_or_create to avoid duplicate objects | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.get_or_create(method=value, defaults={"reference": uuid.uuid4()})
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.create(method=value, reference=uuid.uuid4()).save()
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
<commit_msg>Use get_or_create to avoid duplicate objects<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.get_or_create(method=value, defaults={"reference": uuid.uuid4()})
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.create(method=value, reference=uuid.uuid4()).save()
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
Use get_or_create to avoid duplicate objects# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.get_or_create(method=value, defaults={"reference": uuid.uuid4()})
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.create(method=value, reference=uuid.uuid4()).save()
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
<commit_msg>Use get_or_create to avoid duplicate objects<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import uuid
from cla_common.constants import RESEARCH_CONTACT_VIA
def create_default_contact_for_research_methods(apps, schema_editor):
ContactResearchMethods = apps.get_model("legalaid", "ContactResearchMethod")
for value, name in RESEARCH_CONTACT_VIA:
ContactResearchMethods.objects.get_or_create(method=value, defaults={"reference": uuid.uuid4()})
def rollback_default_contact_for_research_methods(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0021_auto_20190515_1042")]
operations = [
migrations.RunPython(
create_default_contact_for_research_methods, rollback_default_contact_for_research_methods
)
]
|
4f2fa4e43b314c9d05e0b9b9e73641463c16a9cb | server/proposal/__init__.py | server/proposal/__init__.py | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
| from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
tasks.set_up_hooks()
| Set up the proposal tasks on app startup | Set up the proposal tasks on app startup
| Python | mit | cityofsomerville/citydash,codeforboston/cornerwise,codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/cornerwise,cityofsomerville/cornerwise,codeforboston/cornerwise,cityofsomerville/citydash,cityofsomerville/citydash,codeforboston/cornerwise,cityofsomerville/cornerwise,cityofsomerville/cornerwise | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
Set up the proposal tasks on app startup | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
tasks.set_up_hooks()
| <commit_before>from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
<commit_msg>Set up the proposal tasks on app startup<commit_after> | from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
tasks.set_up_hooks()
| from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
Set up the proposal tasks on app startupfrom django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
tasks.set_up_hooks()
| <commit_before>from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
<commit_msg>Set up the proposal tasks on app startup<commit_after>from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
tasks.set_up_hooks()
|
833f8ce0673701eb64fb20ee067ccd8c58e473c6 | child_sync_typo3/wizard/child_depart_wizard.py | child_sync_typo3/wizard/child_depart_wizard.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| Correct wrong inheritance on sponsorship_typo3 child_depart wizard. | Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
| Python | agpl-3.0 | MickSandoz/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,ndtran/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
Correct wrong inheritance on sponsorship_typo3 child_depart wizard. | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
<commit_msg>Correct wrong inheritance on sponsorship_typo3 child_depart wizard.<commit_after> | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
Correct wrong inheritance on sponsorship_typo3 child_depart wizard.# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
<commit_msg>Correct wrong inheritance on sponsorship_typo3 child_depart wizard.<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
2192219d92713c6eb76593d0c6c29413d040db6a | scripts/cronRefreshEdxQualtrics.py | scripts/cronRefreshEdxQualtrics.py | from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| Revert "Added script for cron job to load surveys to database." | Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.
| Python | bsd-3-clause | paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation | from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf. | from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| <commit_before>from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
<commit_msg>Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.<commit_after> | from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| <commit_before>from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
<commit_msg>Revert "Added script for cron job to load surveys to database."
This reverts commit 34e5560437348e5cfeab589b783c9cc524aa2abf.<commit_after>from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
899e3c9f81a43dcb94e290ce0a86f128bd94effd | opps/channel/context_processors.py | opps/channel/context_processors.py | # -*- coding: utf-8 -*-
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
| # -*- coding: utf-8 -*-
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
| Apply filter channel published on menu list (channel context processors) | Apply filter channel published on menu list (channel context processors)
| Python | mit | YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps | # -*- coding: utf-8 -*-
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
Apply filter channel published on menu list (channel context processors) | # -*- coding: utf-8 -*-
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
| <commit_before># -*- coding: utf-8 -*-
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
<commit_msg>Apply filter channel published on menu list (channel context processors)<commit_after> | # -*- coding: utf-8 -*-
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
| # -*- coding: utf-8 -*-
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
Apply filter channel published on menu list (channel context processors)# -*- coding: utf-8 -*-
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
| <commit_before># -*- coding: utf-8 -*-
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
<commit_msg>Apply filter channel published on menu list (channel context processors)<commit_after># -*- coding: utf-8 -*-
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
|
c55bf8d153c47500615b8ded3c95957be8ee70a3 | froide/helper/json_view.py | froide/helper/json_view.py | from django import http
from django.views.generic import DetailView
class JSONResponseDetailView(DetailView):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
| from django import http
from django.views.generic import DetailView, ListView
class JSONResponseMixin(object):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
class JSONResponseListView(ListView, JSONResponseMixin):
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseListView, self).get_context_data(**kwargs)
return context
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return "[%s]" % ",".join([o.as_json() for o in context['object_list']])
class JSONResponseDetailView(DetailView, JSONResponseMixin):
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseDetailView, self).get_context_data(**kwargs)
return context
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
| Refactor JSONResponse views to include ListView | Refactor JSONResponse views to include ListView | Python | mit | okfse/froide,stefanw/froide,LilithWittmann/froide,CodeforHawaii/froide,catcosmo/froide,ryankanno/froide,stefanw/froide,okfse/froide,catcosmo/froide,ryankanno/froide,fin/froide,LilithWittmann/froide,okfse/froide,ryankanno/froide,ryankanno/froide,LilithWittmann/froide,fin/froide,CodeforHawaii/froide,LilithWittmann/froide,stefanw/froide,catcosmo/froide,okfse/froide,catcosmo/froide,LilithWittmann/froide,CodeforHawaii/froide,okfse/froide,catcosmo/froide,ryankanno/froide,fin/froide,fin/froide,CodeforHawaii/froide,CodeforHawaii/froide,stefanw/froide,stefanw/froide | from django import http
from django.views.generic import DetailView
class JSONResponseDetailView(DetailView):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
Refactor JSONResponse views to include ListView | from django import http
from django.views.generic import DetailView, ListView
class JSONResponseMixin(object):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
class JSONResponseListView(ListView, JSONResponseMixin):
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseListView, self).get_context_data(**kwargs)
return context
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return "[%s]" % ",".join([o.as_json() for o in context['object_list']])
class JSONResponseDetailView(DetailView, JSONResponseMixin):
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseDetailView, self).get_context_data(**kwargs)
return context
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
| <commit_before>from django import http
from django.views.generic import DetailView
class JSONResponseDetailView(DetailView):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
<commit_msg>Refactor JSONResponse views to include ListView<commit_after> | from django import http
from django.views.generic import DetailView, ListView
class JSONResponseMixin(object):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
class JSONResponseListView(ListView, JSONResponseMixin):
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseListView, self).get_context_data(**kwargs)
return context
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return "[%s]" % ",".join([o.as_json() for o in context['object_list']])
class JSONResponseDetailView(DetailView, JSONResponseMixin):
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseDetailView, self).get_context_data(**kwargs)
return context
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
| from django import http
from django.views.generic import DetailView
class JSONResponseDetailView(DetailView):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
Refactor JSONResponse views to include ListViewfrom django import http
from django.views.generic import DetailView, ListView
class JSONResponseMixin(object):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
class JSONResponseListView(ListView, JSONResponseMixin):
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseListView, self).get_context_data(**kwargs)
return context
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return "[%s]" % ",".join([o.as_json() for o in context['object_list']])
class JSONResponseDetailView(DetailView, JSONResponseMixin):
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseDetailView, self).get_context_data(**kwargs)
return context
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
| <commit_before>from django import http
from django.views.generic import DetailView
class JSONResponseDetailView(DetailView):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
<commit_msg>Refactor JSONResponse views to include ListView<commit_after>from django import http
from django.views.generic import DetailView, ListView
class JSONResponseMixin(object):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
class JSONResponseListView(ListView, JSONResponseMixin):
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseListView, self).get_context_data(**kwargs)
return context
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return "[%s]" % ",".join([o.as_json() for o in context['object_list']])
class JSONResponseDetailView(DetailView, JSONResponseMixin):
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def get_context_data(self, **kwargs):
self.format = "html"
if "format" in self.kwargs:
self.format = self.kwargs['format']
context = super(JSONResponseDetailView, self).get_context_data(**kwargs)
return context
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
|
2cde35bb6f948f861026921daf7fe24b353af273 | kerrokantasi/settings/__init__.py | kerrokantasi/settings/__init__.py | from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| Add bulleted and numbered list to CKEditor | Add bulleted and numbered list to CKEditor
Closes #180
| Python | mit | vikoivun/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,vikoivun/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi | from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
Add bulleted and numbered list to CKEditor
Closes #180 | from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| <commit_before>from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
<commit_msg>Add bulleted and numbered list to CKEditor
Closes #180<commit_after> | from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
Add bulleted and numbered list to CKEditor
Closes #180from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| <commit_before>from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
<commit_msg>Add bulleted and numbered list to CKEditor
Closes #180<commit_after>from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
|
38eb6221ca41446c0c4fb1510354bdc4f00ba5f1 | serfnode/build/handler/launcher.py | serfnode/build/handler/launcher.py | #!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
| #!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
cid = open('/child_{}'.format(name)).read().strip()
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
cid = open('/child_{}'.format(name)).read().strip()
except IOError:
cid = name
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
| Remove children via uid rather than name | Remove children via uid rather than name | Python | mit | waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode | #!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
Remove children via uid rather than name | #!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
cid = open('/child_{}'.format(name)).read().strip()
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
cid = open('/child_{}'.format(name)).read().strip()
except IOError:
cid = name
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
| <commit_before>#!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
<commit_msg>Remove children via uid rather than name<commit_after> | #!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
cid = open('/child_{}'.format(name)).read().strip()
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
cid = open('/child_{}'.format(name)).read().strip()
except IOError:
cid = name
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
| #!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
Remove children via uid rather than name#!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
cid = open('/child_{}'.format(name)).read().strip()
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
cid = open('/child_{}'.format(name)).read().strip()
except IOError:
cid = name
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
| <commit_before>#!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
<commit_msg>Remove children via uid rather than name<commit_after>#!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
cid = open('/child_{}'.format(name)).read().strip()
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
cid = open('/child_{}'.format(name)).read().strip()
except IOError:
cid = name
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(cid, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
|
bca6f6041e9f49d1d25d7a9c4cb88080d88c45b1 | dumper/invalidation.py | dumper/invalidation.py | import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
| import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
'''
Each path can actually have multiple cached entries, varying based on different HTTP
methods. So a GET request will have a different cached response from a HEAD request.
In order to invalidate a path, we must first know all the different cache keys that the
path might have been cached at. This returns those keys
'''
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
| Comment concerning differences in keys per path | Comment concerning differences in keys per path | Python | mit | saulshanabrook/django-dumper | import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
Comment concerning differences in keys per path | import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
'''
Each path can actually have multiple cached entries, varying based on different HTTP
methods. So a GET request will have a different cached response from a HEAD request.
In order to invalidate a path, we must first know all the different cache keys that the
path might have been cached at. This returns those keys
'''
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
| <commit_before>import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
<commit_msg>Comment concerning differences in keys per path<commit_after> | import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
'''
Each path can actually have multiple cached entries, varying based on different HTTP
methods. So a GET request will have a different cached response from a HEAD request.
In order to invalidate a path, we must first know all the different cache keys that the
path might have been cached at. This returns those keys
'''
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
| import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
Comment concerning differences in keys per pathimport dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
'''
Each path can actually have multiple cached entries, varying based on different HTTP
methods. So a GET request will have a different cached response from a HEAD request.
In order to invalidate a path, we must first know all the different cache keys that the
path might have been cached at. This returns those keys
'''
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
| <commit_before>import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
<commit_msg>Comment concerning differences in keys per path<commit_after>import dumper.utils
def invalidate_paths(paths):
'''
Invalidate all pages for a certain path.
'''
for path in paths:
for key in all_cache_keys_from_path(path):
dumper.utils.cache.delete(key)
def all_cache_keys_from_path(path):
'''
Each path can actually have multiple cached entries, varying based on different HTTP
methods. So a GET request will have a different cached response from a HEAD request.
In order to invalidate a path, we must first know all the different cache keys that the
path might have been cached at. This returns those keys
'''
return [dumper.utils.cache_key(path, method) for method in dumper.settings.CACHABLE_METHODS]
|
b0806c0b8b950a3007107cc58fb21e504cf09427 | homedisplay/control_milight/management/commands/listen_433.py | homedisplay/control_milight/management/commands/listen_433.py | from django.core.management.base import BaseCommand, CommandError
from control_milight.utils import process_automatic_trigger
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial("/dev/tty.usbserial-A9007LzM", 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
| from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
| Move serial device path to settings | Move serial device path to settings
| Python | bsd-3-clause | ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display | from django.core.management.base import BaseCommand, CommandError
from control_milight.utils import process_automatic_trigger
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial("/dev/tty.usbserial-A9007LzM", 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
Move serial device path to settings | from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from control_milight.utils import process_automatic_trigger
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial("/dev/tty.usbserial-A9007LzM", 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
<commit_msg>Move serial device path to settings<commit_after> | from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
| from django.core.management.base import BaseCommand, CommandError
from control_milight.utils import process_automatic_trigger
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial("/dev/tty.usbserial-A9007LzM", 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
Move serial device path to settingsfrom control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from control_milight.utils import process_automatic_trigger
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial("/dev/tty.usbserial-A9007LzM", 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
<commit_msg>Move serial device path to settings<commit_after>from control_milight.utils import process_automatic_trigger
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import serial
import time
class Command(BaseCommand):
args = ''
help = 'Listen for 433MHz radio messages'
ITEM_MAP = {
"5236713": "kitchen",
"7697747": "hall",
"1328959": "front-door",
"247615": "unused-magnetic-switch",
"8981913": "table",
}
def handle(self, *args, **options):
s = serial.Serial(settings.ARDUINO_433, 9600)
sent_event_map = {}
while True:
line = s.readline()
print "- %s" % line
if line.startswith("Received "):
id = line.split(" ")[1]
if id in self.ITEM_MAP:
item_name = self.ITEM_MAP[id]
if item_name in sent_event_map:
if sent_event_map[item_name] > time.time() - 5:
print "Too recent event: %s" % item_name
continue
process_automatic_trigger(item_name)
sent_event_map[item_name] = time.time()
else:
print "Unknown id: %s" % id
|
6231afb51f5653e210f41d47c66797c4bd4d738d | accounts/views.py | accounts/views.py | # coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
| # coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name', 'username']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
| Make it possible for the user to change username | Make it possible for the user to change username
| Python | agpl-3.0 | christophmeissner/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,coders4help/volunteer_planner,alper/volunteer_planner,alper/volunteer_planner,alper/volunteer_planner,coders4help/volunteer_planner,klinger/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner,klinger/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner,volunteer-planner/volunteer_planner,coders4help/volunteer_planner,volunteer-planner/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner | # coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
Make it possible for the user to change username | # coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name', 'username']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
| <commit_before># coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
<commit_msg>Make it possible for the user to change username<commit_after> | # coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name', 'username']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
| # coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
Make it possible for the user to change username# coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name', 'username']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
| <commit_before># coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
<commit_msg>Make it possible for the user to change username<commit_after># coding: utf-8
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import UpdateView
from django.core.urlresolvers import reverse_lazy
from volunteer_planner.utils import LoginRequiredMixin
@login_required()
def user_account_detail(request):
user = request.user
return render(request, 'user_detail.html', {'user': user})
class AccountUpdateView(LoginRequiredMixin, UpdateView):
"""
Allows a user to update their profile.
"""
fields = ['first_name', 'last_name', 'username']
template_name = "user_account_edit.html"
success_url = reverse_lazy('account_detail')
def get_object(self, queryset=None):
return self.request.user
|
7c75da48d6746fc148a79051338c3cd554d75615 | accounts/views.py | accounts/views.py | from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
r = request.GET.get('r', '')
auth_logout(request)
if r:
return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
| from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
next = request.GET.get('next', '')
auth_logout(request)
if next:
return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
| Change variable name to next for logout function | Change variable name to next for logout function
| Python | agpl-3.0 | openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms | from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
r = request.GET.get('r', '')
auth_logout(request)
if r:
return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
Change variable name to next for logout function | from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
next = request.GET.get('next', '')
auth_logout(request)
if next:
return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
| <commit_before>from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
r = request.GET.get('r', '')
auth_logout(request)
if r:
return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
<commit_msg>Change variable name to next for logout function<commit_after> | from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
next = request.GET.get('next', '')
auth_logout(request)
if next:
return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
| from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
r = request.GET.get('r', '')
auth_logout(request)
if r:
return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
Change variable name to next for logout functionfrom django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
next = request.GET.get('next', '')
auth_logout(request)
if next:
return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
| <commit_before>from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
r = request.GET.get('r', '')
auth_logout(request)
if r:
return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
<commit_msg>Change variable name to next for logout function<commit_after>from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings
def logout(request):
"""Logs out user redirects if in request"""
next = request.GET.get('next', '')
auth_logout(request)
if next:
return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
else:
return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
|
41ea0dd8c48ef8a336422482e9bbd1911bb7e168 | Commitment.py | Commitment.py | import sublime
import sublime_plugin
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message) | import sublime
import sublime_plugin
import HTMLParser
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message) | Make that it works in 90% of the cases. 3:30. | Make that it works in 90% of the cases. 3:30.
| Python | mit | janraasch/sublimetext-commitment,janraasch/sublimetext-commitment | import sublime
import sublime_plugin
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message)Make that it works in 90% of the cases. 3:30. | import sublime
import sublime_plugin
import HTMLParser
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message) | <commit_before>import sublime
import sublime_plugin
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message)<commit_msg>Make that it works in 90% of the cases. 3:30.<commit_after> | import sublime
import sublime_plugin
import HTMLParser
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message) | import sublime
import sublime_plugin
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message)Make that it works in 90% of the cases. 3:30.import sublime
import sublime_plugin
import HTMLParser
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message) | <commit_before>import sublime
import sublime_plugin
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = commit.get('message', '')
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message)<commit_msg>Make that it works in 90% of the cases. 3:30.<commit_after>import sublime
import sublime_plugin
import HTMLParser
from commit import Commitment
whatthecommit = 'http://whatthecommit.com/'
randomMessages = Commitment()
class CommitmentToClipboardCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.set_clipboard(message)
class CommitmentToStatusBarCommand(sublime_plugin.WindowCommand):
def run(self):
commit = randomMessages.get()
message = HTMLParser.HTMLParser().unescape(commit.get('message', '').replace('\n','').replace('<br/>', '\n'))
message_hash = commit.get('message_hash', '')
if message:
print 'Commitment: ' + '\n' + message + '\n' + 'Permalink: ' + whatthecommit + message_hash
sublime.status_message(message) |
8fd65190a2a68a7afeab91b0a02c83309f72ccd6 | tests/test_testing.py | tests/test_testing.py |
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout(self):
assert greenado.gyield(coroutine()) == 1234
|
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test
@greenado.generator
def test_without_timeout2(self):
assert (yield coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
@greenado.generator
def test_with_timeout2(self):
assert (yield coroutine()) == 1234
| Add tests to gen_test for generator, seems to work | Add tests to gen_test for generator, seems to work
| Python | apache-2.0 | virtuald/greenado,virtuald/greenado |
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout(self):
assert greenado.gyield(coroutine()) == 1234
Add tests to gen_test for generator, seems to work |
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test
@greenado.generator
def test_without_timeout2(self):
assert (yield coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
@greenado.generator
def test_with_timeout2(self):
assert (yield coroutine()) == 1234
| <commit_before>
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout(self):
assert greenado.gyield(coroutine()) == 1234
<commit_msg>Add tests to gen_test for generator, seems to work<commit_after> |
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test
@greenado.generator
def test_without_timeout2(self):
assert (yield coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
@greenado.generator
def test_with_timeout2(self):
assert (yield coroutine()) == 1234
|
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout(self):
assert greenado.gyield(coroutine()) == 1234
Add tests to gen_test for generator, seems to work
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test
@greenado.generator
def test_without_timeout2(self):
assert (yield coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
@greenado.generator
def test_with_timeout2(self):
assert (yield coroutine()) == 1234
| <commit_before>
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout(self):
assert greenado.gyield(coroutine()) == 1234
<commit_msg>Add tests to gen_test for generator, seems to work<commit_after>
import greenado
from greenado.testing import gen_test
from tornado.testing import AsyncTestCase
from tornado import gen
@gen.coroutine
def coroutine():
raise gen.Return(1234)
class GreenadoTests(AsyncTestCase):
@gen_test
def test_without_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test
@greenado.generator
def test_without_timeout2(self):
assert (yield coroutine()) == 1234
@gen_test(timeout=5)
def test_with_timeout1(self):
assert greenado.gyield(coroutine()) == 1234
@gen_test(timeout=5)
@greenado.generator
def test_with_timeout2(self):
assert (yield coroutine()) == 1234
|
f1b22cfcca8470a59a7bab261bbd2a46a7c2a2ed | socib_cms/cmsutils/utils.py | socib_cms/cmsutils/utils.py | # coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return "/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return "/{lang}{url}".format(
lang=language,
url=url)
return url
| # coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return u"/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return u"/{lang}{url}".format(
lang=language,
url=url)
return url
| Fix unicode issues at url translation | Fix unicode issues at url translation
| Python | mit | socib/django-socib-cms,socib/django-socib-cms | # coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return "/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return "/{lang}{url}".format(
lang=language,
url=url)
return url
Fix unicode issues at url translation | # coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return u"/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return u"/{lang}{url}".format(
lang=language,
url=url)
return url
| <commit_before># coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return "/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return "/{lang}{url}".format(
lang=language,
url=url)
return url
<commit_msg>Fix unicode issues at url translation<commit_after> | # coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return u"/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return u"/{lang}{url}".format(
lang=language,
url=url)
return url
| # coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return "/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return "/{lang}{url}".format(
lang=language,
url=url)
return url
Fix unicode issues at url translation# coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return u"/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return u"/{lang}{url}".format(
lang=language,
url=url)
return url
| <commit_before># coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return "/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return "/{lang}{url}".format(
lang=language,
url=url)
return url
<commit_msg>Fix unicode issues at url translation<commit_after># coding: utf-8
import re
from django.core.urlresolvers import reverse
from django.conf import settings
def reverse_no_i18n(viewname, *args, **kwargs):
result = reverse(viewname, *args, **kwargs)
m = re.match(r'(/[^/]*)(/.*$)', result)
return m.groups()[1]
def change_url_language(url, language):
if hasattr(settings, 'LANGUAGES'):
languages = [lang[0] for lang in settings.LANGUAGES]
m = re.match(r'/([^/]*)(/.*$)', url)
if m and m.groups()[0] in languages:
return u"/{lang}{url}".format(
lang=language,
url=m.groups()[1])
return u"/{lang}{url}".format(
lang=language,
url=url)
return url
|
d2fb1f22be6c6434873f2bcafb6b8a9b714acde9 | website/archiver/decorators.py | website/archiver/decorators.py | import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
| import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
| Use fail signal in fail_archive_on_error decorator | Use fail signal in fail_archive_on_error decorator
| Python | apache-2.0 | amyshi188/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,mluke93/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,jeffreyliu3230/osf.io,chrisseto/osf.io,acshi/osf.io,mattclark/osf.io,billyhunt/osf.io,caneruguz/osf.io,cosenal/osf.io,SSJohns/osf.io,njantrania/osf.io,mattclark/osf.io,alexschiller/osf.io,samchrisinger/osf.io,HarryRybacki/osf.io,MerlinZhang/osf.io,mluo613/osf.io,TomBaxter/osf.io,mattclark/osf.io,kch8qx/osf.io,baylee-d/osf.io,chennan47/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,acshi/osf.io,bdyetton/prettychart,danielneis/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,emetsger/osf.io,reinaH/osf.io,ticklemepierce/osf.io,felliott/osf.io,hmoco/osf.io,SSJohns/osf.io,danielneis/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,TomHeatwole/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,cldershem/osf.io,adlius/osf.io,laurenrevere/osf.io,jolene-esposito/osf.io,sloria/osf.io,Johnetordoff/osf.io,doublebits/osf.io,MerlinZhang/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,billyhunt/osf.io,crcresearch/osf.io,njantrania/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,mfraezz/osf.io,hmoco/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,danielneis/osf.io,aaxelb/osf.io,Nesiehr/osf.io,caseyrygt/osf.io,kwierman/osf.io,cldershem/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,fabianvf/osf.io,amyshi188/osf.io,petermalcolm/osf.io,adlius/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,rdhyee/osf.io,samanehsan/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,dplorimer/osf,leb2dg/osf.io,mfraezz/osf.io,abought/osf.io,amyshi188/osf.io,doublebits/osf.io,sbt9uc/osf.io,lyndsysimon/osf.io,dplorimer/osf,caneruguz/osf.io,laurenrevere/osf.io,ticklemepierce/osf.io,lyndsysimon/osf.io,DanielSBrown/osf.io,jmcarp/osf.io,baylee-d/osf.io,GageGaskins/osf.io,chennan47/osf.io,fabianvf/osf.io,cldershem/osf.io,jmcarp/osf.io,jnayak1/osf.io,binoculars/osf.io,zamattiac/
osf.io,acshi/osf.io,crcresearch/osf.io,jinluyuan/osf.io,jnayak1/osf.io,binoculars/osf.io,Ghalko/osf.io,jinluyuan/osf.io,cosenal/osf.io,RomanZWang/osf.io,wearpants/osf.io,cslzchen/osf.io,ticklemepierce/osf.io,wearpants/osf.io,samchrisinger/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,abought/osf.io,zachjanicki/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,bdyetton/prettychart,MerlinZhang/osf.io,pattisdr/osf.io,chennan47/osf.io,bdyetton/prettychart,caseyrygt/osf.io,samanehsan/osf.io,pattisdr/osf.io,reinaH/osf.io,sloria/osf.io,caseyrollins/osf.io,zamattiac/osf.io,bdyetton/prettychart,caseyrollins/osf.io,TomHeatwole/osf.io,jeffreyliu3230/osf.io,cldershem/osf.io,mluo613/osf.io,KAsante95/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,billyhunt/osf.io,chrisseto/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,Ghalko/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,njantrania/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,ckc6cz/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,leb2dg/osf.io,cwisecarver/osf.io,billyhunt/osf.io,GageGaskins/osf.io,dplorimer/osf,arpitar/osf.io,dplorimer/osf,baylee-d/osf.io,adlius/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,kwierman/osf.io,adlius/osf.io,aaxelb/osf.io,jnayak1/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,cwisecarver/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,ticklemepierce/osf.io,pattisdr/osf.io,erinspace/osf.io,arpitar/osf.io,icereval/osf.io,felliott/osf.io,KAsante95/osf.io,danielneis/osf.io,leb2dg/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,mluo613/osf.io,KAsante95/osf.io,HalcyonChimera/osf.io,jeffreyliu3230/osf.io,zachjanicki/osf.io,zamattiac/osf.io,HarryRybacki/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,njantrania/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,cosenal/osf.io,sbt9uc/osf.io,R
omanZWang/osf.io,hmoco/osf.io,reinaH/osf.io,Ghalko/osf.io,icereval/osf.io,cslzchen/osf.io,arpitar/osf.io,reinaH/osf.io,zachjanicki/osf.io,jolene-esposito/osf.io,fabianvf/osf.io,alexschiller/osf.io,GageGaskins/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,rdhyee/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,mluo613/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,brianjgeiger/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,erinspace/osf.io,kwierman/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,leb2dg/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,samanehsan/osf.io,wearpants/osf.io,abought/osf.io,ckc6cz/osf.io,crcresearch/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,jolene-esposito/osf.io,fabianvf/osf.io,binoculars/osf.io,kch8qx/osf.io,icereval/osf.io,mluke93/osf.io,Johnetordoff/osf.io,jmcarp/osf.io,mluo613/osf.io,acshi/osf.io,asanfilippo7/osf.io,saradbowman/osf.io,Nesiehr/osf.io,kch8qx/osf.io,mluke93/osf.io,mfraezz/osf.io,TomBaxter/osf.io,samanehsan/osf.io,mluke93/osf.io,arpitar/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,cslzchen/osf.io,sbt9uc/osf.io,ZobairAlijan/osf.io,haoyuchen1992/osf.io,jinluyuan/osf.io,alexschiller/osf.io,jnayak1/osf.io,cosenal/osf.io,sloria/osf.io,HarryRybacki/osf.io,ckc6cz/osf.io,doublebits/osf.io,saradbowman/osf.io,abought/osf.io,doublebits/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,emetsger/osf.io,emetsger/osf.io,acshi/osf.io,aaxelb/osf.io | import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
Use fail signal in fail_archive_on_error decorator | import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
| <commit_before>import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
<commit_msg>Use fail signal in fail_archive_on_error decorator<commit_after> | import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
| import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
Use fail signal in fail_archive_on_error decoratorimport functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
| <commit_before>import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import utils
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
utils.handle_archive_fail(
ARCHIVER_UNCAUGHT_ERROR,
registration.registered_from,
registration,
registration.registered_user,
str(e)
)
return wrapped
<commit_msg>Use fail signal in fail_archive_on_error decorator<commit_after>import functools
from framework.exceptions import HTTPError
from website.project.decorators import _inject_nodes
from website.archiver import ARCHIVER_UNCAUGHT_ERROR
from website.archiver import signals
def fail_archive_on_error(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
return func(*args, **kwargs)
except HTTPError as e:
_inject_nodes(kwargs)
registration = kwargs['node']
signals.send.archive_fail(
registration,
ARCHIVER_UNCAUGHT_ERROR,
[str(e)]
)
return wrapped
|
22ae3a2e9a236de61c078d234d920a3e6bc62d7b | pylisp/application/lispd/address_tree/ddt_container_node.py | pylisp/application/lispd/address_tree/ddt_container_node.py | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
| '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| Add a bit of docs | Add a bit of docs
| Python | bsd-3-clause | steffann/pylisp | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
Add a bit of docs | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| <commit_before>'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
<commit_msg>Add a bit of docs<commit_after> | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
Add a bit of docs'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| <commit_before>'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
<commit_msg>Add a bit of docs<commit_after>'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
|
54c81494cbbe9a20db50596e68c57e1caa624043 | src-django/authentication/signals/user_post_save.py | src-django/authentication/signals/user_post_save.py | from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
| from authentication.models import UserProfile
from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
user_profile = UserProfile.objects.create(user=instance, is_email_confirmed=False)
user_profile.save()
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
| Add a User post_save hook for creating user profiles | Add a User post_save hook for creating user profiles
| Python | bsd-3-clause | SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder | from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
Add a User post_save hook for creating user profiles | from authentication.models import UserProfile
from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
user_profile = UserProfile.objects.create(user=instance, is_email_confirmed=False)
user_profile.save()
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
| <commit_before>from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
<commit_msg>Add a User post_save hook for creating user profiles<commit_after> | from authentication.models import UserProfile
from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
user_profile = UserProfile.objects.create(user=instance, is_email_confirmed=False)
user_profile.save()
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
| from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
Add a User post_save hook for creating user profilesfrom authentication.models import UserProfile
from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
user_profile = UserProfile.objects.create(user=instance, is_email_confirmed=False)
user_profile.save()
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
| <commit_before>from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
<commit_msg>Add a User post_save hook for creating user profiles<commit_after>from authentication.models import UserProfile
from django.contrib.auth.models import User, Group
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from rest_framework.authtoken.models import Token
@receiver(post_save, sender=User)
def on_user_post_save(sender, instance=None, created=False, **kwargs):
# Normally, users automatically get a Token created for them (if they do not
# already have one) when they hit
#
# rest_framework.authtoken.views.obtain_auth_token view
#
# This will create an authentication token for newly created users so the
# user registration endpoint can return a token back to Ember
# (thus avoiding the need to hit login endpoint)
if created:
user_profile = UserProfile.objects.create(user=instance, is_email_confirmed=False)
user_profile.save()
Token.objects.create(user=instance)
# Add new user to the proper user group
normal_users_group, created = Group.objects.get_or_create(name=settings.NORMAL_USER_GROUP)
instance.groups.add(normal_users_group)
|
d8cb4384f32f4d0e20f3212a36cc01915260f7a8 | tests/routers.py | tests/routers.py | """Search router."""
from rest_framework.routers import DefaultRouter, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
)
]
| """Search router."""
from rest_framework.routers import DefaultRouter, DynamicRoute, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
),
# Dynamically generated list routes. Generated using
# @action(detail=False) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=False,
initkwargs={}
),
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
},
name='{basename}-detail',
detail=True,
initkwargs={'suffix': 'Instance'}
),
# Dynamically generated detail routes. Generated using
# @action(detail=True) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{lookup}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=True,
initkwargs={}
),
]
| Support custom actions in search router | Support custom actions in search router
| Python | apache-2.0 | genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio | """Search router."""
from rest_framework.routers import DefaultRouter, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
)
]
Support custom actions in search router | """Search router."""
from rest_framework.routers import DefaultRouter, DynamicRoute, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
),
# Dynamically generated list routes. Generated using
# @action(detail=False) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=False,
initkwargs={}
),
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
},
name='{basename}-detail',
detail=True,
initkwargs={'suffix': 'Instance'}
),
# Dynamically generated detail routes. Generated using
# @action(detail=True) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{lookup}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=True,
initkwargs={}
),
]
| <commit_before>"""Search router."""
from rest_framework.routers import DefaultRouter, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
)
]
<commit_msg>Support custom actions in search router<commit_after> | """Search router."""
from rest_framework.routers import DefaultRouter, DynamicRoute, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
),
# Dynamically generated list routes. Generated using
# @action(detail=False) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=False,
initkwargs={}
),
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
},
name='{basename}-detail',
detail=True,
initkwargs={'suffix': 'Instance'}
),
# Dynamically generated detail routes. Generated using
# @action(detail=True) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{lookup}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=True,
initkwargs={}
),
]
| """Search router."""
from rest_framework.routers import DefaultRouter, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
)
]
Support custom actions in search router"""Search router."""
from rest_framework.routers import DefaultRouter, DynamicRoute, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
),
# Dynamically generated list routes. Generated using
# @action(detail=False) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=False,
initkwargs={}
),
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
},
name='{basename}-detail',
detail=True,
initkwargs={'suffix': 'Instance'}
),
# Dynamically generated detail routes. Generated using
# @action(detail=True) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{lookup}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=True,
initkwargs={}
),
]
| <commit_before>"""Search router."""
from rest_framework.routers import DefaultRouter, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
)
]
<commit_msg>Support custom actions in search router<commit_after>"""Search router."""
from rest_framework.routers import DefaultRouter, DynamicRoute, Route
class SearchRouter(DefaultRouter):
"""Custom router for search endpoints.
Search endpoints don't follow REST principles and thus don't need
routes that default router provides.
"""
routes = [
Route(
url=r"^{prefix}{trailing_slash}$",
mapping={"get": "list", "post": "list_with_post"},
name="{basename}",
initkwargs={},
detail=False,
),
# Dynamically generated list routes. Generated using
# @action(detail=False) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=False,
initkwargs={}
),
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'
},
name='{basename}-detail',
detail=True,
initkwargs={'suffix': 'Instance'}
),
# Dynamically generated detail routes. Generated using
# @action(detail=True) decorator on methods of the viewset.
DynamicRoute(
url=r'^{prefix}/{lookup}/{url_path}{trailing_slash}$',
name='{basename}-{url_name}',
detail=True,
initkwargs={}
),
]
|
694df5ba69e4e7123009605e59c2b5417a3b52c5 | tools/fitsevt.py | tools/fitsevt.py | #! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
print(nBins)
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
| #! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
| Remove print statement about number of bins | Remove print statement about number of bins
| Python | mit | fauzanzaid/IUCAA-GRB-detection-Feature-extraction | #! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
print(nBins)
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
Remove print statement about number of bins | #! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
| <commit_before>#! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
print(nBins)
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
<commit_msg>Remove print statement about number of bins<commit_after> | #! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
| #! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
print(nBins)
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
Remove print statement about number of bins#! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
| <commit_before>#! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
print(nBins)
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
<commit_msg>Remove print statement about number of bins<commit_after>#! /usr/bin/python3
import sys
import os
import math
from astropy.io import fits
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
eLo = int(sys.argv[3])
eHi = int(sys.argv[4])
binSize = int(sys.argv[5])
fnames = os.listdir(inputFolder)
for fname in fnames:
print(fname)
hdulist = fits.open(inputFolder+"/"+fname)
for i in range(1,5):
timeRange = hdulist[i].header["TSTOP"] - hdulist[i].header["TSTART"]
nBins = math.ceil(timeRange/binSize)
count = [0]*nBins
for event in hdulist[i].data:
if(event["ENERGY"]>=eLo or event["ENERGY"]<=eHi):
index = math.floor( nBins*(event["Time"] - hdulist[i].header["TSTART"])/timeRange )
count[index] += 1
sigClass = 1
with open(outputFolder+"/{0}_{1}".format(fname,i),'w') as f:
f.write("{0} {1}\n".format(nBins,sigClass))
for j in range(nBins):
f.write("{0}\n".format(count[j]))
|
d7bea2995fc54c15404b4b47cefae5fc7b0201de | partner_internal_code/res_partner.py | partner_internal_code/res_partner.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code')
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code',
copy=False,
)
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
| FIX partner internal code compatibility with sign up | FIX partner internal code compatibility with sign up
| Python | agpl-3.0 | ingadhoc/partner | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code')
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
FIX partner internal code compatibility with sign up | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code',
copy=False,
)
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code')
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
<commit_msg>FIX partner internal code compatibility with sign up<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code',
copy=False,
)
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code')
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
FIX partner internal code compatibility with sign up# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code',
copy=False,
)
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code')
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
<commit_msg>FIX partner internal code compatibility with sign up<commit_after># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class partner(models.Model):
""""""
_inherit = 'res.partner'
internal_code = fields.Char(
'Internal Code',
copy=False,
)
# we let this to base nane search improoved
# def name_search(self, cr, uid, name, args=None,
# operator='ilike', context=None, limit=100):
# args = args or []
# res = []
# if name:
# recs = self.search(
# cr, uid, [('internal_code', operator, name)] + args,
# limit=limit, context=context)
# res = self.name_get(cr, uid, recs)
# res += super(partner, self).name_search(
# cr, uid,
# name=name, args=args, operator=operator, limit=limit)
# return res
@api.model
def create(self, vals):
if not vals.get('internal_code', False):
vals['internal_code'] = self.env[
'ir.sequence'].next_by_code('partner.internal.code') or '/'
return super(partner, self).create(vals)
_sql_constraints = {
('internal_code_uniq', 'unique(internal_code)',
'Internal Code mast be unique!')
}
|
da05fe2d41a077276946c5d6c86995c60315e093 | src/auspex/instruments/__init__.py | src/auspex/instruments/__init__.py | import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager()
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager()
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
| import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager("@py")
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager("@py")
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
| Make sure we load pyvisa-py when enumerating instruments. | Make sure we load pyvisa-py when enumerating instruments.
| Python | apache-2.0 | BBN-Q/Auspex,BBN-Q/Auspex,BBN-Q/Auspex,BBN-Q/Auspex | import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager()
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager()
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
Make sure we load pyvisa-py when enumerating instruments. | import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager("@py")
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager("@py")
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
| <commit_before>import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager()
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager()
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
<commit_msg>Make sure we load pyvisa-py when enumerating instruments.<commit_after> | import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager("@py")
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager("@py")
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
| import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager()
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager()
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
Make sure we load pyvisa-py when enumerating instruments.import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager("@py")
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager("@py")
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
| <commit_before>import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager()
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager()
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
<commit_msg>Make sure we load pyvisa-py when enumerating instruments.<commit_after>import pkgutil
import importlib
import pyvisa
instrument_map = {}
for loader, name, is_pkg in pkgutil.iter_modules(__path__):
module = importlib.import_module('auspex.instruments.' + name)
if hasattr(module, "__all__"):
globals().update((name, getattr(module, name)) for name in module.__all__)
for name in module.__all__:
instrument_map.update({name:getattr(module,name)})
def enumerate_visa_instruments():
rm = pyvisa.ResourceManager("@py")
print(rm.list_resources())
def probe_instrument_ids():
rm = pyvisa.ResourceManager("@py")
for instr_label in rm.list_resources():
instr = rm.open_resource(instr_label)
try:
print(instr_label, instr.query('*IDN?'))
except:
print(instr_label, "Did not respond")
instr.close()
|
4eb4a2eaa42cd71bf4427bdaaa1e853975432691 | graphene/storage/intermediate/general_store_manager.py | graphene/storage/intermediate/general_store_manager.py | from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
| from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
| Allow keyword arguments in GeneralStoreManager.create_item method | Allow keyword arguments in GeneralStoreManager.create_item method
| Python | apache-2.0 | PHB-CS123/graphene,PHB-CS123/graphene,PHB-CS123/graphene | from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
Allow keyword arguments in GeneralStoreManager.create_item method | from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
| <commit_before>from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
<commit_msg>Allow keyword arguments in GeneralStoreManager.create_item method<commit_after> | from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
| from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
Allow keyword arguments in GeneralStoreManager.create_item methodfrom graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
| <commit_before>from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
<commit_msg>Allow keyword arguments in GeneralStoreManager.create_item method<commit_after>from graphene.storage.id_store import *
class GeneralStoreManager:
"""
Handles the creation/deletion of nodes to the NodeStore with ID recycling
"""
def __init__(self, store):
"""
Creates an instance of the GeneralStoreManager
:param store: Store to manage
:return: General store manager to handle index recycling
:rtype: GeneralStoreManager
"""
self.store = store
self.idStore = IdStore(store.FILE_NAME + ".id")
def create_item(self, **kwargs):
"""
Creates an item with the type of the store being managed
:return: New item with type STORE_TYPE
"""
# Check for an available ID from the IdStore
available_id = self.idStore.get_id()
# If no ID is available, get the last index of the file
if available_id == IdStore.NO_ID:
available_id = self.store.get_last_file_index()
# Create a type based on the type our store stores
return self.store.STORAGE_TYPE(available_id, **kwargs)
def delete_item(self, item):
"""
Deletes the given item from the store and adds the index to its IdStore
to be recycled
:return: Nothing
:rtype: None
"""
# Get index of item to be deleted
deleted_index = item.index
# Delete the item from the store
self.store.delete_item(item)
# Add the index to the IdStore, so it can be recycled
self.idStore.store_id(deleted_index)
|
6a8c8bc0e407327e5c0e4cae3d4d6ace179a6940 | webserver/codemanagement/serializers.py | webserver/codemanagement/serializers.py | from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
| from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug', 'eligible_to_win')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
| Add team eligibility to API | Add team eligibility to API
| Python | bsd-3-clause | siggame/webserver,siggame/webserver,siggame/webserver | from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
Add team eligibility to API | from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug', 'eligible_to_win')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
| <commit_before>from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
<commit_msg>Add team eligibility to API<commit_after> | from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug', 'eligible_to_win')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
| from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
Add team eligibility to APIfrom rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug', 'eligible_to_win')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
| <commit_before>from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
<commit_msg>Add team eligibility to API<commit_after>from rest_framework import serializers
from greta.models import Repository
from competition.models import Team
from .models import TeamClient, TeamSubmission
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ('id', 'name', 'slug', 'eligible_to_win')
class RepoSerializer(serializers.ModelSerializer):
class Meta:
model = Repository
fields = ('name', 'description', 'forked_from',
'path', 'is_ready')
forked_from = serializers.RelatedField()
path = serializers.SerializerMethodField('get_path')
is_ready = serializers.SerializerMethodField('get_is_ready')
def get_path(self, repo):
return repo.path
def get_is_ready(self, repo):
return repo.is_ready()
class TeamSubmissionSerializer(serializers.ModelSerializer):
class Meta:
model = TeamSubmission
fields = ('name', 'commit')
class TeamClientSerializer(serializers.ModelSerializer):
class Meta:
model = TeamClient
fields = ('team', 'repository', 'tag', 'language')
team = TeamSerializer()
repository = RepoSerializer()
tag = serializers.SerializerMethodField('get_tag')
language = serializers.SerializerMethodField('get_language')
def get_tag(self, teamclient):
try:
latest_sub= teamclient.submissions.latest()
return TeamSubmissionSerializer(latest_sub).data
except TeamSubmission.DoesNotExist:
return None
def get_language(self, teamclient):
return teamclient.base.language
|
72902ebcada7bdc7a889f8766b63afff82110182 | webshop/extensions/category/__init__.py | webshop/extensions/category/__init__.py | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
""" | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
TODO: We want a setting allowing us to limit the nestedness of categories.
For 'navigational' reasons, a number of 3 should be a reasonable default.
""" | Comment about recursion limit in categories. | Comment about recursion limit in categories.
| Python | agpl-3.0 | dokterbob/django-shopkit,dokterbob/django-shopkit | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
"""Comment about recursion limit in categories. | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
TODO: We want a setting allowing us to limit the nestedness of categories.
For 'navigational' reasons, a number of 3 should be a reasonable default.
""" | <commit_before># Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
"""<commit_msg>Comment about recursion limit in categories.<commit_after> | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
TODO: We want a setting allowing us to limit the nestedness of categories.
For 'navigational' reasons, a number of 3 should be a reasonable default.
""" | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
"""Comment about recursion limit in categories.# Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
TODO: We want a setting allowing us to limit the nestedness of categories.
For 'navigational' reasons, a number of 3 should be a reasonable default.
""" | <commit_before># Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
"""<commit_msg>Comment about recursion limit in categories.<commit_after># Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-webshop.
#
# django-webshop is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Django-webshop, by default, contains base classes for two kinds of categories:
* Simple categories, which define a base class for products that belong to
exactly one category.
* Advanced categories, that belong to zero or more categories.
Furthermore, generic abstract base models are defined for 'normal' categories
and for nested categories, allowing for the hierarchical categorization of
products.
TODO: We want a setting allowing us to limit the nestedness of categories.
For 'navigational' reasons, a number of 3 should be a reasonable default.
""" |
56d3db6aae71c88ff8b55bb1d173abc025be7e8c | jacquard/tests/test_cli.py | jacquard/tests/test_cli.py | import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
| import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
| Add test of a write command | Add test of a write command
| Python | mit | prophile/jacquard,prophile/jacquard | import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
Add test of a write command | import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
| <commit_before>import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
<commit_msg>Add test of a write command<commit_after> | import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
| import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
Add test of a write commandimport io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
| <commit_before>import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
<commit_msg>Add test of a write command<commit_after>import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
|
e9df15b0f084ed9e026a5de129b109a3c546f99c | src/libeeyore/parse_tree_to_cpp.py | src/libeeyore/parse_tree_to_cpp.py |
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( self )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, parse_tree_in_fl ) )
cpp_out_fl.write( env.render_exe( values ) )
| from itertools import imap
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from functionvalues import *
from languagevalues import *
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def remove_comments( ln ):
i = ln.find( "#" )
if i != -1:
return ln[:i]
else:
return ln
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( env )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, imap( remove_comments, parse_tree_in_fl ) ) )
cpp_out_fl.write( env.render_exe( values ) )
| Handle comments in parse tree. | Handle comments in parse tree.
| Python | mit | andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper |
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( self )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, parse_tree_in_fl ) )
cpp_out_fl.write( env.render_exe( values ) )
Handle comments in parse tree. | from itertools import imap
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from functionvalues import *
from languagevalues import *
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def remove_comments( ln ):
i = ln.find( "#" )
if i != -1:
return ln[:i]
else:
return ln
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( env )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, imap( remove_comments, parse_tree_in_fl ) ) )
cpp_out_fl.write( env.render_exe( values ) )
| <commit_before>
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( self )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, parse_tree_in_fl ) )
cpp_out_fl.write( env.render_exe( values ) )
<commit_msg>Handle comments in parse tree.<commit_after> | from itertools import imap
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from functionvalues import *
from languagevalues import *
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def remove_comments( ln ):
i = ln.find( "#" )
if i != -1:
return ln[:i]
else:
return ln
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( env )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, imap( remove_comments, parse_tree_in_fl ) ) )
cpp_out_fl.write( env.render_exe( values ) )
|
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( self )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, parse_tree_in_fl ) )
cpp_out_fl.write( env.render_exe( values ) )
Handle comments in parse tree.from itertools import imap
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from functionvalues import *
from languagevalues import *
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def remove_comments( ln ):
i = ln.find( "#" )
if i != -1:
return ln[:i]
else:
return ln
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( env )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, imap( remove_comments, parse_tree_in_fl ) ) )
cpp_out_fl.write( env.render_exe( values ) )
| <commit_before>
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( self )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, parse_tree_in_fl ) )
cpp_out_fl.write( env.render_exe( values ) )
<commit_msg>Handle comments in parse tree.<commit_after>from itertools import imap
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from functionvalues import *
from languagevalues import *
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def remove_comments( ln ):
i = ln.find( "#" )
if i != -1:
return ln[:i]
else:
return ln
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( env )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, imap( remove_comments, parse_tree_in_fl ) ) )
cpp_out_fl.write( env.render_exe( values ) )
|
6e660da290db674eebb0c353662e5400bc735397 | examples/backplane_demo.py | examples/backplane_demo.py | #!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print "pub", pub, repr(pub), pub.input
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
| #!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print("pub", pub, repr(pub), pub.input)
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
| Update backplane demo to be py3 only | Update backplane demo to be py3 only
| Python | apache-2.0 | sparkslabs/guild,sparkslabs/guild,sparkslabs/guild | #!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print "pub", pub, repr(pub), pub.input
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
Update backplane demo to be py3 only | #!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print("pub", pub, repr(pub), pub.input)
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
| <commit_before>#!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print "pub", pub, repr(pub), pub.input
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
<commit_msg>Update backplane demo to be py3 only<commit_after> | #!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print("pub", pub, repr(pub), pub.input)
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
| #!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print "pub", pub, repr(pub), pub.input
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
Update backplane demo to be py3 only#!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print("pub", pub, repr(pub), pub.input)
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
| <commit_before>#!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print "pub", pub, repr(pub), pub.input
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
<commit_msg>Update backplane demo to be py3 only<commit_after>#!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
@process_method
def process(self):
self.output("hello")
@late_bind_safe
def output(self, value):
pass
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
print("pub", pub, repr(pub), pub.input)
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
|
355372ff51a84c0a6d7d86c0ef1fb12def341436 | invada/engine.py | invada/engine.py | # -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
return best_response_pair.generate(best_captured, context, self.knowledge)
| # -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
response, new_context = best_response_pair.generate(best_captured, context, self.knowledge)
return response, new_context, best_score
| Add the score to Engine.chat return values | Add the score to Engine.chat return values
| Python | mit | carrotflakes/invada | # -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
return best_response_pair.generate(best_captured, context, self.knowledge)
Add the score to Engine.chat return values | # -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
response, new_context = best_response_pair.generate(best_captured, context, self.knowledge)
return response, new_context, best_score
| <commit_before># -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
return best_response_pair.generate(best_captured, context, self.knowledge)
<commit_msg>Add the score to Engine.chat return values<commit_after> | # -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
response, new_context = best_response_pair.generate(best_captured, context, self.knowledge)
return response, new_context, best_score
| # -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
return best_response_pair.generate(best_captured, context, self.knowledge)
Add the score to Engine.chat return values# -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
response, new_context = best_response_pair.generate(best_captured, context, self.knowledge)
return response, new_context, best_score
| <commit_before># -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
return best_response_pair.generate(best_captured, context, self.knowledge)
<commit_msg>Add the score to Engine.chat return values<commit_after># -*- coding: utf-8 -*-
class Engine:
def __init__(self,
response_pairs,
knowledge={}):
self.response_pairs = response_pairs
self.knowledge = knowledge
def chat(self, user_utterance, context):
best_score = 0
best_response_pair = None
best_captured = {}
for response_pair in self.response_pairs:
captured = response_pair.match(user_utterance, self.knowledge)
if captured is None:
continue
score = response_pair.score(captured, context, self.knowledge)
if best_score < score:
best_score, best_response_pair, best_captured = score, response_pair, captured
response, new_context = best_response_pair.generate(best_captured, context, self.knowledge)
return response, new_context, best_score
|
1d52996a88eb5aed643fe61ee959bd88373401b3 | filebutler_upload/utils.py | filebutler_upload/utils.py | from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
| from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
| Throw a linebreak in there upon completion | Throw a linebreak in there upon completion
| Python | bsd-3-clause | jhaals/filebutler-upload | from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
Throw a linebreak in there upon completion | from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
| <commit_before>from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
<commit_msg>Throw a linebreak in there upon completion<commit_after> | from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
| from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
Throw a linebreak in there upon completionfrom datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
| <commit_before>from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
<commit_msg>Throw a linebreak in there upon completion<commit_after>from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
|
07f96a22afe2d010809d03077d9cdd5ecb43d017 | migrations/0020_change_ds_name_to_non_uniqe.py | migrations/0020_change_ds_name_to_non_uniqe.py | from redash.models import db
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT unique_name")
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for constraint in ['unique_name', 'data_sources_name']:
try:
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT {}".format(constraint))
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit()
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| Update data source unique name migration to support another name of constraint | Update data source unique name migration to support another name of constraint
| Python | bsd-2-clause | akariv/redash,chriszs/redash,jmvasquez/redashtest,pubnative/redash,akariv/redash,amino-data/redash,ninneko/redash,ninneko/redash,denisov-vlad/redash,EverlyWell/redash,ninneko/redash,amino-data/redash,44px/redash,moritz9/redash,guaguadev/redash,moritz9/redash,guaguadev/redash,chriszs/redash,pubnative/redash,44px/redash,rockwotj/redash,44px/redash,vishesh92/redash,guaguadev/redash,getredash/redash,EverlyWell/redash,M32Media/redash,ninneko/redash,stefanseifert/redash,alexanderlz/redash,useabode/redash,pubnative/redash,useabode/redash,denisov-vlad/redash,ninneko/redash,denisov-vlad/redash,M32Media/redash,getredash/redash,hudl/redash,alexanderlz/redash,chriszs/redash,guaguadev/redash,hudl/redash,denisov-vlad/redash,M32Media/redash,chriszs/redash,rockwotj/redash,EverlyWell/redash,moritz9/redash,imsally/redash,vishesh92/redash,moritz9/redash,M32Media/redash,imsally/redash,jmvasquez/redashtest,stefanseifert/redash,amino-data/redash,crowdworks/redash,alexanderlz/redash,hudl/redash,easytaxibr/redash,getredash/redash,crowdworks/redash,guaguadev/redash,denisov-vlad/redash,crowdworks/redash,akariv/redash,jmvasquez/redashtest,EverlyWell/redash,akariv/redash,imsally/redash,rockwotj/redash,jmvasquez/redashtest,easytaxibr/redash,imsally/redash,easytaxibr/redash,stefanseifert/redash,pubnative/redash,pubnative/redash,akariv/redash,stefanseifert/redash,vishesh92/redash,amino-data/redash,getredash/redash,hudl/redash,jmvasquez/redashtest,useabode/redash,rockwotj/redash,44px/redash,stefanseifert/redash,crowdworks/redash,vishesh92/redash,useabode/redash,alexanderlz/redash,easytaxibr/redash,easytaxibr/redash,getredash/redash | from redash.models import db
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT unique_name")
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
Update data source unique name migration to support another name of constraint | from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for constraint in ['unique_name', 'data_sources_name']:
try:
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT {}".format(constraint))
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit()
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| <commit_before>from redash.models import db
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT unique_name")
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
<commit_msg>Update data source unique name migration to support another name of constraint<commit_after> | from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for constraint in ['unique_name', 'data_sources_name']:
try:
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT {}".format(constraint))
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit()
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| from redash.models import db
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT unique_name")
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
Update data source unique name migration to support another name of constraintfrom redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for constraint in ['unique_name', 'data_sources_name']:
try:
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT {}".format(constraint))
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit()
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
| <commit_before>from redash.models import db
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT unique_name")
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
<commit_msg>Update data source unique name migration to support another name of constraint<commit_after>from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for constraint in ['unique_name', 'data_sources_name']:
try:
db.database.execute_sql("ALTER TABLE data_sources DROP CONSTRAINT {}".format(constraint))
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit()
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
|
169dda227f85f77ac52a4295e8fb7acd1b3184f5 | core/observables/mac_address.py | core/observables/mac_address.py | from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]?){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
self.value = re.sub(r'[.:\-]', '', self.value)
self.value = self.value.upper()
self.value = \
':'.join([self.value[i:i + 2] for i in range(0, len(self.value), 2)])
| from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
value = re.sub(r'[.:\-]', '', self.value).upper()
self.value = ':'.join(
value[i:i + 2] for i in xrange(0, len(value), 2)
)
| Make byte-separator mandatory in MAC addresses | Make byte-separator mandatory in MAC addresses
This will prevent false positive (from hash values for example).
| Python | apache-2.0 | yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti | from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]?){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
self.value = re.sub(r'[.:\-]', '', self.value)
self.value = self.value.upper()
self.value = \
':'.join([self.value[i:i + 2] for i in range(0, len(self.value), 2)])
Make byte-separator mandatory in MAC addresses
This will prevent false positive (from hash values for example). | from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
value = re.sub(r'[.:\-]', '', self.value).upper()
self.value = ':'.join(
value[i:i + 2] for i in xrange(0, len(value), 2)
)
| <commit_before>from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]?){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
self.value = re.sub(r'[.:\-]', '', self.value)
self.value = self.value.upper()
self.value = \
':'.join([self.value[i:i + 2] for i in range(0, len(self.value), 2)])
<commit_msg>Make byte-separator mandatory in MAC addresses
This will prevent false positive (from hash values for example).<commit_after> | from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
value = re.sub(r'[.:\-]', '', self.value).upper()
self.value = ':'.join(
value[i:i + 2] for i in xrange(0, len(value), 2)
)
| from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]?){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
self.value = re.sub(r'[.:\-]', '', self.value)
self.value = self.value.upper()
self.value = \
':'.join([self.value[i:i + 2] for i in range(0, len(self.value), 2)])
Make byte-separator mandatory in MAC addresses
This will prevent false positive (from hash values for example).from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
value = re.sub(r'[.:\-]', '', self.value).upper()
self.value = ':'.join(
value[i:i + 2] for i in xrange(0, len(value), 2)
)
| <commit_before>from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]?){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
self.value = re.sub(r'[.:\-]', '', self.value)
self.value = self.value.upper()
self.value = \
':'.join([self.value[i:i + 2] for i in range(0, len(self.value), 2)])
<commit_msg>Make byte-separator mandatory in MAC addresses
This will prevent false positive (from hash values for example).<commit_after>from __future__ import unicode_literals
import re
from core.observables import Observable
class MacAddress(Observable):
regex = r'(?P<search>(([0-9A-Fa-f]{1,2}[.:-]){5,7}([0-9A-Fa-f]{1,2})))'
exclude_fields = Observable.exclude_fields
DISPLAY_FIELDS = Observable.DISPLAY_FIELDS
@classmethod
def is_valid(cls, match):
value = match.group('search')
return len(value) > 0
def normalize(self):
value = re.sub(r'[.:\-]', '', self.value).upper()
self.value = ':'.join(
value[i:i + 2] for i in xrange(0, len(value), 2)
)
|
bbe835c8aa561d8db58e116f0e55a5b19c4f9ca4 | firecares/sitemaps.py | firecares/sitemaps.py | from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False)
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
| from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False).only('population', 'featured', 'name')
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
| Fix sitemap memory consumption during generation | Fix sitemap memory consumption during generation
- Defer ALL FireDepartment fields except for those required to create a sitemap
- Was causing node startup to hang
see #321 | Python | mit | FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares | from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False)
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
Fix sitemap memory consumption during generation
- Defer ALL FireDepartment fields except for those required to create a sitemap
- Was causing node startup to hang
see #321 | from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False).only('population', 'featured', 'name')
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
| <commit_before>from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False)
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
<commit_msg>Fix sitemap memory consumption during generation
- Defer ALL FireDepartment fields except for those required to create a sitemap
- Was causing node startup to hang
see #321<commit_after> | from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False).only('population', 'featured', 'name')
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
| from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False)
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
Fix sitemap memory consumption during generation
- Defer ALL FireDepartment fields except for those required to create a sitemap
- Was causing node startup to hang
see #321from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False).only('population', 'featured', 'name')
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
| <commit_before>from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False)
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
<commit_msg>Fix sitemap memory consumption during generation
- Defer ALL FireDepartment fields except for those required to create a sitemap
- Was causing node startup to hang
see #321<commit_after>from django.contrib import sitemaps
from firecares.firestation.models import FireDepartment
from django.db.models import Max
from django.core.urlresolvers import reverse
class BaseSitemap(sitemaps.Sitemap):
protocol = 'https'
def items(self):
return ['media', 'models_performance_score', 'models_community_risk', 'safe_grades', 'login', 'contact_us',
'firedepartment_list']
def priority(self, item):
return 1
def location(self, item):
return reverse(item)
class DepartmentsSitemap(sitemaps.Sitemap):
protocol = 'https'
max_population = 1
def items(self):
queryset = FireDepartment.objects.filter(archived=False).only('population', 'featured', 'name')
self.max_population = queryset.aggregate(Max('population'))['population__max']
return queryset
def location(self, item):
return item.get_absolute_url()
def priority(self, item):
if item.featured is True:
return 1
if item.population is None:
return 0
# adding a bit to the total so featured items are always above others
priority = item.population / float(self.max_population + 0.1)
return priority
def lastmod(self, item):
return item.modified
|
196b9547b4dbcbfbf4891c7fd3ea3b9944018430 | scripts/cronRefreshEdxQualtrics.py | scripts/cronRefreshEdxQualtrics.py | from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| Revert "Revert "Added script for cron job to load surveys to database."" | Revert "Revert "Added script for cron job to load surveys to database.""
This reverts commit 2192219d92713c6eb76593d0c6c29413d040db6a.
| Python | bsd-3-clause | paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation,paepcke/json_to_relation | from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
Revert "Revert "Added script for cron job to load surveys to database.""
This reverts commit 2192219d92713c6eb76593d0c6c29413d040db6a. | from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| <commit_before>from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
<commit_msg>Revert "Revert "Added script for cron job to load surveys to database.""
This reverts commit 2192219d92713c6eb76593d0c6c29413d040db6a.<commit_after> | from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
Revert "Revert "Added script for cron job to load surveys to database.""
This reverts commit 2192219d92713c6eb76593d0c6c29413d040db6a.from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
| <commit_before>from surveyextractor import QualtricsExtractor
import getopt, sys
# Script for scheduling regular EdxQualtrics updates
# Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
<commit_msg>Revert "Revert "Added script for cron job to load surveys to database.""
This reverts commit 2192219d92713c6eb76593d0c6c29413d040db6a.<commit_after>from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
0b77e09ac16006d1baa6a5f4093b51c1a13863e9 | app/models.py | app/models.py | from app import db
class Digit(db.Model):
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
image = db.Column(db.BLOB)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
| from app import db
class Digit(db.Model):
__tablename__ = 'digits'
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
def as_dict(self, fields=None):
if not fields:
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
else:
return {c: getattr(self, c) for c in fields}
| Add as_dict method to Digit model | Add as_dict method to Digit model
| Python | mit | starcalibre/MNIST3D,starcalibre/MNIST3D,starcalibre/MNIST3D | from app import db
class Digit(db.Model):
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
image = db.Column(db.BLOB)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
Add as_dict method to Digit model | from app import db
class Digit(db.Model):
__tablename__ = 'digits'
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
def as_dict(self, fields=None):
if not fields:
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
else:
return {c: getattr(self, c) for c in fields}
| <commit_before>from app import db
class Digit(db.Model):
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
image = db.Column(db.BLOB)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
<commit_msg>Add as_dict method to Digit model<commit_after> | from app import db
class Digit(db.Model):
__tablename__ = 'digits'
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
def as_dict(self, fields=None):
if not fields:
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
else:
return {c: getattr(self, c) for c in fields}
| from app import db
class Digit(db.Model):
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
image = db.Column(db.BLOB)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
Add as_dict method to Digit modelfrom app import db
class Digit(db.Model):
__tablename__ = 'digits'
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
def as_dict(self, fields=None):
if not fields:
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
else:
return {c: getattr(self, c) for c in fields}
| <commit_before>from app import db
class Digit(db.Model):
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
image = db.Column(db.BLOB)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
<commit_msg>Add as_dict method to Digit model<commit_after>from app import db
class Digit(db.Model):
__tablename__ = 'digits'
id = db.Column(db.INTEGER, primary_key=True)
label = db.Column(db.INTEGER)
tsne_x = db.Column(db.REAL)
tsne_y = db.Column(db.REAL)
tsne_z = db.Column(db.REAL)
array = db.Column(db.String)
def __repr__(self):
return '<Digit %d %d>' % (self.id, self.label)
def as_dict(self, fields=None):
if not fields:
return {c.name: getattr(self, c.name) for c in self.__table__.columns}
else:
return {c: getattr(self, c) for c in fields}
|
e2c92e8b6e8fb10addc73986914014b278598470 | spotpy/examples/spot_setup_standardnormal.py | spotpy/examples/spot_setup_standardnormal.py | '''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction | '''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction | Fix docstring in standardnormal example | Fix docstring in standardnormal example
| Python | mit | bees4ever/spotpy,bees4ever/spotpy,bees4ever/spotpy,thouska/spotpy,thouska/spotpy,thouska/spotpy | '''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunctionFix docstring in standardnormal example | '''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction | <commit_before>'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction<commit_msg>Fix docstring in standardnormal example<commit_after> | '''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction | '''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunctionFix docstring in standardnormal example'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction | <commit_before>'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Rosenbrock function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction<commit_msg>Fix docstring in standardnormal example<commit_after>'''
Copyright 2015 by Tobias Houska
This file is part of Statistical Parameter Estimation Tool (SPOTPY).
:author: Tobias Houska
This example implements the Standard Normal function into SPOT.
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import spotpy
class spot_setup(object):
def __init__(self,mean=0,std=1):
self.params = [spotpy.parameter.Uniform('x',-5,5,1.5,3.0)
]
self.mean=mean
self.std=std
def parameters(self):
return spotpy.parameter.generate(self.params)
def simulation(self,x):
simulations= (1.0/(std*np.sqrt(2*np.pi)))**((-1.0/2.0)*(((x-self.mean)/self.std)**2))
return simulations
def evaluation(self):
observations = [0]
return observations
def objectivefunction(self, simulation,evaluation):
objectivefunction = -spotpy.objectivefunctions.rmse(evaluation = evaluation,simulation = simulation)
return objectivefunction |
c973385f877d940231deb8d81e929647eadc280a | app/config.py | app/config.py | # -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URI',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
| # -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URL',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
| Use standard env var for DATABASE_URL | Use standard env var for DATABASE_URL
| Python | mit | crossgovernmentservices/csd-notes,crossgovernmentservices/csd-notes,crossgovernmentservices/csd-notes | # -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URI',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
Use standard env var for DATABASE_URL | # -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URL',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
| <commit_before># -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URI',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
<commit_msg>Use standard env var for DATABASE_URL<commit_after> | # -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URL',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
| # -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URI',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
Use standard env var for DATABASE_URL# -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URL',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
| <commit_before># -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URI',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
<commit_msg>Use standard env var for DATABASE_URL<commit_after># -*- coding: utf-8 -*-
"""
Application configuration
"""
import os
from os.path import dirname, join
# get settings from environment, or credstash if running in AWS
env = os.environ
if env.get('SETTINGS') == 'AWS':
from lib.aws_env import env
ASSETS_DEBUG = False
DEBUG = bool(env.get('DEBUG', True))
HUMANIZE_USE_UTC = True
MARKDOWN_EXTENSIONS = [
'markdown.extensions.nl2br',
'markdown.extensions.sane_lists',
'markdown.extensions.smart_strong',
'markdown.extensions.smarty',
]
SECRET_KEY = env.get('SECRET_KEY', os.urandom(24))
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_PATH = join(dirname(__file__), '../development.db')
SQLALCHEMY_DATABASE_URI = env.get(
'DATABASE_URL',
'sqlite:///{}'.format(SQLALCHEMY_DATABASE_PATH))
SQLALCHEMY_TRACK_MODIFICATIONS = bool(env.get(
'SQLALCHEMY_TRACK_MODIFICATIONS',
False))
TESTING = bool(env.get('TESTING', False))
|
1b2fa45766b1ea5945f246d74bc4adf0114abe84 | astroquery/splatalogue/__init__.py | astroquery/splatalogue/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'SSplatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'Splatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| Fix typo in description of config item | Fix typo in description of config item
| Python | bsd-3-clause | imbasimba/astroquery,ceb8/astroquery,imbasimba/astroquery,ceb8/astroquery | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'SSplatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
Fix typo in description of config item | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'Splatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'SSplatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
<commit_msg>Fix typo in description of config item<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'Splatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'SSplatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
Fix typo in description of config item# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'Splatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'SSplatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
<commit_msg>Fix typo in description of config item<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg (adam.g.ginsburg@gmail.com)
:Originally contributed by:
Magnus Vilhelm Persson (magnusp@vilhelm.nu)
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'Splatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
|
cc7e3e5ef9d9c59b6b1ac80826445839ede73092 | astroquery/mast/__init__.py | astroquery/mast/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| Revert mast dev host change | Revert mast dev host change
| Python | bsd-3-clause | imbasimba/astroquery,ceb8/astroquery,imbasimba/astroquery,ceb8/astroquery | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
Revert mast dev host change | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
<commit_msg>Revert mast dev host change<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
Revert mast dev host change# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
<commit_msg>Revert mast dev host change<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
|
81de62d46d7daefb2e1eef0d0cc4f5ca5c8aef2f | blog/utils.py | blog/utils.py | from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
| from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
| Use GCBV queryset to get PostGetMixin obj. | Ch18: Use GCBV queryset to get PostGetMixin obj.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
Ch18: Use GCBV queryset to get PostGetMixin obj. | from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
| <commit_before>from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
<commit_msg>Ch18: Use GCBV queryset to get PostGetMixin obj.<commit_after> | from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
    """Mixin resolving a Post from year/month/slug URL keyword arguments.

    Intended for Django generic detail-style views: ``get_object`` reads
    the ``year``, ``month`` and slug URL kwargs and returns the single
    matching :class:`Post` from the view's queryset.
    """

    date_field = 'pub_date'        # model date field used for year/month filtering
    model = Post
    month_url_kwarg = 'month'
    year_url_kwarg = 'year'
    errors = {
        'url_kwargs':
            "Generic view {} must be called with "
            "year, month, and slug.",
        'not_exist':
            "No {} by that date and slug.",
    }

    def get_object(self, queryset=None):
        """Return the object matching the URL's year, month and slug.

        Raises ``AttributeError`` when a required URL kwarg is missing
        (a view-configuration error) and ``Http404`` when no matching
        object exists.
        """
        # Bug fix: Http404 was raised below but never imported anywhere
        # in this module (only get_object_or_404 is), causing a NameError
        # instead of a 404 response.  Import it locally so the fix is
        # self-contained within this mixin.
        from django.http import Http404

        year = self.kwargs.get(self.year_url_kwarg)
        month = self.kwargs.get(self.month_url_kwarg)
        slug = self.kwargs.get(self.slug_url_kwarg)
        if (year is None
                or month is None
                or slug is None):
            raise AttributeError(
                self.errors['url_kwargs'].format(
                    self.__class__.__name__))
        date_field = self.date_field
        slug_field = self.get_slug_field()
        filter_dict = {
            date_field + '__year': year,
            date_field + '__month': month,
            slug_field: slug,
        }
        if queryset is None:
            queryset = self.get_queryset()
        queryset = queryset.filter(**filter_dict)
        try:
            obj = queryset.get()
        except queryset.model.DoesNotExist:
            raise Http404(
                self.errors['not_exist'].format(
                    queryset.model
                    ._meta.verbose_name))
        return obj
| from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
Ch18: Use GCBV queryset to get PostGetMixin obj.from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
| <commit_before>from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
<commit_msg>Ch18: Use GCBV queryset to get PostGetMixin obj.<commit_after>from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
|
8e58d7cccb837254cc433c7533bff119cc19645d | javascript_settings/templatetags/javascript_settings_tags.py | javascript_settings/templatetags/javascript_settings_tags.py | from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| Use json instead of django.utils.simplejson. | Use json instead of django.utils.simplejson. | Python | mit | pozytywnie/django-javascript-settings | from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
Use json instead of django.utils.simplejson. | import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| <commit_before>from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
<commit_msg>Use json instead of django.utils.simplejson.<commit_after> | import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
Use json instead of django.utils.simplejson.import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| <commit_before>from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
<commit_msg>Use json instead of django.utils.simplejson.<commit_after>import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
    """
    Returns a node with generated configuration.

    ``parser`` and ``token`` are required by Django's template-tag
    compilation protocol but are unused: the tag takes no arguments.
    """
    return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
    """Template node that emits the JavaScript ``configuration`` variable.

    Rendering yields a ``var configuration = <json>;`` statement whose
    payload is the JSON-serialized output of the default configuration
    builder.
    """

    def __init__(self):
        # No per-node state; kept to preserve the original construction path.
        pass

    def render(self, context):
        """Serialize the current configuration into a JS assignment."""
        settings = DEFAULT_CONFIGURATION_BUILDER.get_configuration()
        payload = json.dumps(settings)
        return 'var configuration = ' + payload + ';'
|
e4ad2863236cd36e5860f1d17a06ca05e30216d5 | make_database.py | make_database.py | import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0,
name TEXT,
artist_name TEXT,
artist_uri TEXT,
artist_image TEXT,
album_name TEXT,
album_uri TEXT,
album_image TEXT
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| Store more stuff about songs in the queue | Store more stuff about songs in the queue
| Python | mit | projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox | import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
Store more stuff about songs in the queue | import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0,
name TEXT,
artist_name TEXT,
artist_uri TEXT,
artist_image TEXT,
album_name TEXT,
album_uri TEXT,
album_image TEXT
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| <commit_before>import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
<commit_msg>Store more stuff about songs in the queue<commit_after> | import sqlite3
# DDL for the playback queue: one row per queued track, with denormalized
# Spotify metadata so the UI can render entries without extra API calls.
CREATE_SONG_QUEUE = '''
    CREATE TABLE IF NOT EXISTS
    jukebox_song_queue (
        spotify_uri TEXT,
        has_played INTEGER DEFAULT 0,
        name TEXT,
        artist_name TEXT,
        artist_uri TEXT,
        artist_image TEXT,
        album_name TEXT,
        album_uri TEXT,
        album_image TEXT
    );
'''

if __name__ == '__main__':
    conn = sqlite3.connect('jukebox.db')
    try:
        # Using the connection as a context manager runs the DDL in a
        # transaction: it commits on success and rolls back on error,
        # replacing the unconditional commit of the original.
        with conn:
            conn.execute(CREATE_SONG_QUEUE)
    finally:
        # Close unconditionally so the handle is not leaked when
        # execute() raises (the original skipped close() on error).
        conn.close()
| import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
Store more stuff about songs in the queueimport sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0,
name TEXT,
artist_name TEXT,
artist_uri TEXT,
artist_image TEXT,
album_name TEXT,
album_uri TEXT,
album_image TEXT
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| <commit_before>import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
<commit_msg>Store more stuff about songs in the queue<commit_after>import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0,
name TEXT,
artist_name TEXT,
artist_uri TEXT,
artist_image TEXT,
album_name TEXT,
album_uri TEXT,
album_image TEXT
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
|
5eb2c6f7e1bf0cc1b73b167a08085fccf77974fe | app/config/aws.py | app/config/aws.py | from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| # -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table=None, context=None, profile_name=None):
"""
Low level API for fetching secrets for the current instance
"""
if not table:
table = '{}-credentials'.format(self.env)
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
"""
Enable dict-like access
"""
return self.getSecret(key)
def get(self, key, default=None):
"""
Return the value, or the default if not found
"""
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| Tidy up and doc-comment AWSInstanceEnv class | Tidy up and doc-comment AWSInstanceEnv class
| Python | mit | crossgovernmentservices/csd-notes,crossgovernmentservices/csd-notes,crossgovernmentservices/csd-notes | from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
Tidy up and doc-comment AWSInstanceEnv class | # -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
    """Dict-like access to credstash secrets for the current EC2 instance.

    On construction, the instance id and region are read from EC2
    instance metadata, then the ``Environment`` and ``ConfigVersion``
    tags are read from the instance itself.  Secrets are fetched from
    the ``<Environment>-credentials`` credstash table at that version.
    """

    def __init__(self):
        meta = utils.get_instance_metadata()
        self.instance_id = meta['instance-id']
        # The region name is the availability zone minus its trailing letter.
        self.region = meta['placement']['availability-zone'][:-1]
        connection = ec2.connect_to_region(self.region)
        reservation = connection.get_all_instances(
            instance_ids=[self.instance_id])[0]
        tags = reservation.instances[0].tags
        self.env = tags['Environment']
        self.version = tags['ConfigVersion']

    def getSecret(self, name, table=None, context=None, profile_name=None):
        """
        Low level API for fetching secrets for the current instance
        """
        return credstash.getSecret(
            name,
            self.version,
            region=self.region,
            table=table or '{}-credentials'.format(self.env),
            context=context,
            profile_name=profile_name)

    def __getitem__(self, key):
        """
        Enable dict-like access
        """
        return self.getSecret(key)

    def get(self, key, default=None):
        """
        Return the value, or the default if not found
        """
        try:
            return self[key]
        except credstash.ItemNotFound:
            return default
| <commit_before>from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
<commit_msg>Tidy up and doc-comment AWSInstanceEnv class<commit_after> | # -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table=None, context=None, profile_name=None):
"""
Low level API for fetching secrets for the current instance
"""
if not table:
table = '{}-credentials'.format(self.env)
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
"""
Enable dict-like access
"""
return self.getSecret(key)
def get(self, key, default=None):
"""
Return the value, or the default if not found
"""
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
Tidy up and doc-comment AWSInstanceEnv class# -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table=None, context=None, profile_name=None):
"""
Low level API for fetching secrets for the current instance
"""
if not table:
table = '{}-credentials'.format(self.env)
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
"""
Enable dict-like access
"""
return self.getSecret(key)
def get(self, key, default=None):
"""
Return the value, or the default if not found
"""
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| <commit_before>from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
<commit_msg>Tidy up and doc-comment AWSInstanceEnv class<commit_after># -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table=None, context=None, profile_name=None):
"""
Low level API for fetching secrets for the current instance
"""
if not table:
table = '{}-credentials'.format(self.env)
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
"""
Enable dict-like access
"""
return self.getSecret(key)
def get(self, key, default=None):
"""
Return the value, or the default if not found
"""
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
|
58d7592c603509f2bb625e4e2e5cb31ada4a8194 | astropy/nddata/convolution/tests/test_make_kernel.py | astropy/nddata/convolution/tests/test_make_kernel.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
| Change test for make_kernel(kerneltype='airy') from class to function | Change test for make_kernel(kerneltype='airy') from class to function
| Python | bsd-3-clause | AustereCuriosity/astropy,astropy/astropy,lpsinger/astropy,MSeifert04/astropy,larrybradley/astropy,StuartLittlefair/astropy,dhomeier/astropy,kelle/astropy,joergdietrich/astropy,mhvk/astropy,kelle/astropy,joergdietrich/astropy,StuartLittlefair/astropy,tbabej/astropy,dhomeier/astropy,StuartLittlefair/astropy,mhvk/astropy,DougBurke/astropy,MSeifert04/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,astropy/astropy,funbaker/astropy,astropy/astropy,stargaser/astropy,stargaser/astropy,DougBurke/astropy,pllim/astropy,tbabej/astropy,pllim/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,funbaker/astropy,larrybradley/astropy,StuartLittlefair/astropy,larrybradley/astropy,larrybradley/astropy,saimn/astropy,bsipocz/astropy,saimn/astropy,stargaser/astropy,joergdietrich/astropy,tbabej/astropy,aleksandr-bakanov/astropy,pllim/astropy,saimn/astropy,lpsinger/astropy,AustereCuriosity/astropy,DougBurke/astropy,pllim/astropy,dhomeier/astropy,funbaker/astropy,pllim/astropy,tbabej/astropy,lpsinger/astropy,astropy/astropy,kelle/astropy,kelle/astropy,aleksandr-bakanov/astropy,astropy/astropy,joergdietrich/astropy,DougBurke/astropy,AustereCuriosity/astropy,funbaker/astropy,saimn/astropy,bsipocz/astropy,MSeifert04/astropy,mhvk/astropy,bsipocz/astropy,mhvk/astropy,larrybradley/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,saimn/astropy,tbabej/astropy,dhomeier/astropy,kelle/astropy,dhomeier/astropy,bsipocz/astropy,lpsinger/astropy,stargaser/astropy,joergdietrich/astropy,mhvk/astropy | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
Change test for make_kernel(kerneltype='airy') from class to function | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
<commit_msg>Change test for make_kernel(kerneltype='airy') from class to function<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
Change test for make_kernel(kerneltype='airy') from class to function# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
<commit_msg>Change test for make_kernel(kerneltype='airy') from class to function<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
|
9cdd86499013c1deac7caeb8320c34294789f716 | py/garage/garage/asyncs/actors.py | py/garage/garage/asyncs/actors.py | """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
| """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
| Add _kill_and_join to async actor stub | Add _kill_and_join to async actor stub
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
Add _kill_and_join to async actor stub | """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
| <commit_before>"""Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
<commit_msg>Add _kill_and_join to async actor stub<commit_after> | """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
| """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
Add _kill_and_join to async actor stub"""Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
| <commit_before>"""Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
<commit_msg>Add _kill_and_join to async actor stub<commit_after>"""Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
|
4d40e9db4bd6b58787557e8d5547f69eb67c9b96 | tests/changes/api/test_author_build_index.py | tests/changes/api/test_author_build_index.py | from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email='foo@example.com', name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email=self.default_user.email, name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
self.login(self.default_user)
path = '/api/0/authors/me/builds/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| Add additional coverage to author build list | Add additional coverage to author build list
| Python | apache-2.0 | wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes | from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email='foo@example.com', name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
Add additional coverage to author build list | from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email=self.default_user.email, name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
self.login(self.default_user)
path = '/api/0/authors/me/builds/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| <commit_before>from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email='foo@example.com', name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
<commit_msg>Add additional coverage to author build list<commit_after> | from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email=self.default_user.email, name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
self.login(self.default_user)
path = '/api/0/authors/me/builds/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email='foo@example.com', name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
Add additional coverage to author build listfrom uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email=self.default_user.email, name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
self.login(self.default_user)
path = '/api/0/authors/me/builds/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| <commit_before>from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email='foo@example.com', name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
<commit_msg>Add additional coverage to author build list<commit_after>from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email=self.default_user.email, name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
self.login(self.default_user)
path = '/api/0/authors/me/builds/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
|
5cd9499fcc0c1f9b48216aeca11a7adcd8995a47 | netmiko/mrv/mrv_ssh.py | netmiko/mrv/mrv_ssh.py | """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| Fix for MRV failing to enter enable mode | Fix for MRV failing to enter enable mode
| Python | mit | ktbyers/netmiko,ktbyers/netmiko | """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
Fix for MRV failing to enter enable mode | """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| <commit_before>"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
<commit_msg>Fix for MRV failing to enter enable mode<commit_after> | """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
Fix for MRV failing to enter enable mode"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| <commit_before>"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
<commit_msg>Fix for MRV failing to enter enable mode<commit_after>"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
|
0d2f35ddc27cf4c7155a4d1648c0bbfe0ff3a528 | numpy/_array_api/dtypes.py | numpy/_array_api/dtypes.py | from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64
# Note: This name is changed
from .. import bool_ as bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| Fix the bool name in the array API namespace | Fix the bool name in the array API namespace
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
Fix the bool name in the array API namespace | from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64
# Note: This name is changed
from .. import bool_ as bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| <commit_before>from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
<commit_msg>Fix the bool name in the array API namespace<commit_after> | from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64
# Note: This name is changed
from .. import bool_ as bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
Fix the bool name in the array API namespacefrom .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64
# Note: This name is changed
from .. import bool_ as bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| <commit_before>from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
<commit_msg>Fix the bool name in the array API namespace<commit_after>from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64
# Note: This name is changed
from .. import bool_ as bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
|
7a0560d8bd9dcb421b54522df92618d439941e69 | bills/urls.py | bills/urls.py | from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
| from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
| Change bill detail page to use session and identifier | Change bill detail page to use session and identifier
| Python | mit | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot | from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
Change bill detail page to use session and identifier | from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
| <commit_before>from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
<commit_msg>Change bill detail page to use session and identifier<commit_after> | from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
| from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
Change bill detail page to use session and identifierfrom . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
| <commit_before>from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
<commit_msg>Change bill detail page to use session and identifier<commit_after>from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
|
d9f623baaa8e1d1075f9132108ed7bb11eea39b0 | dask/__init__.py | dask/__init__.py | from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| from __future__ import absolute_import, division, print_function
from .core import istask
from .context import set_options
from .async import get_sync as get
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| Replace dask.get from core.get to async.get_sync | Replace dask.get from core.get to async.get_sync
We really shouldn't publish core.get anywhere, particularly in the
top level API.
| Python | bsd-3-clause | vikhyat/dask,cowlicks/dask,ContinuumIO/dask,blaze/dask,ContinuumIO/dask,mraspaud/dask,mrocklin/dask,cpcloud/dask,jakirkham/dask,chrisbarber/dask,jakirkham/dask,blaze/dask,mikegraham/dask,pombredanne/dask,gameduell/dask,pombredanne/dask,dask/dask,dask/dask,vikhyat/dask,mrocklin/dask,jcrist/dask,mraspaud/dask,jcrist/dask | from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
Replace dask.get from core.get to async.get_sync
We really shouldn't publish core.get anywhere, particularly in the
top level API. | from __future__ import absolute_import, division, print_function
from .core import istask
from .context import set_options
from .async import get_sync as get
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| <commit_before>from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
<commit_msg>Replace dask.get from core.get to async.get_sync
We really shouldn't publish core.get anywhere, particularly in the
top level API.<commit_after> | from __future__ import absolute_import, division, print_function
from .core import istask
from .context import set_options
from .async import get_sync as get
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
Replace dask.get from core.get to async.get_sync
We really shouldn't publish core.get anywhere, particularly in the
top level API.from __future__ import absolute_import, division, print_function
from .core import istask
from .context import set_options
from .async import get_sync as get
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| <commit_before>from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
<commit_msg>Replace dask.get from core.get to async.get_sync
We really shouldn't publish core.get anywhere, particularly in the
top level API.<commit_after>from __future__ import absolute_import, division, print_function
from .core import istask
from .context import set_options
from .async import get_sync as get
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
|
3abe25d2272e2a0111511b68407da0ef3c53f59e | nazs/samba/module.py | nazs/samba/module.py | from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
| from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
from .models import DomainSettings
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
domain_settings = DomainSettings.get()
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
if domain_settings.mode == 'ad':
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
elif domain_settings.mode == 'member':
# TODO
pass
| Use wizard settings during samba provision | Use wizard settings during samba provision
| Python | agpl-3.0 | exekias/droplet,exekias/droplet,exekias/droplet | from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
Use wizard settings during samba provision | from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
from .models import DomainSettings
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
domain_settings = DomainSettings.get()
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
if domain_settings.mode == 'ad':
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
elif domain_settings.mode == 'member':
# TODO
pass
| <commit_before>from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
<commit_msg>Use wizard settings during samba provision<commit_after> | from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
from .models import DomainSettings
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
domain_settings = DomainSettings.get()
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
if domain_settings.mode == 'ad':
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
elif domain_settings.mode == 'member':
# TODO
pass
| from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
Use wizard settings during samba provisionfrom nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
from .models import DomainSettings
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
domain_settings = DomainSettings.get()
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
if domain_settings.mode == 'ad':
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
elif domain_settings.mode == 'member':
# TODO
pass
| <commit_before>from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
<commit_msg>Use wizard settings during samba provision<commit_after>from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
from .models import DomainSettings
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
domain_settings = DomainSettings.get()
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
if domain_settings.mode == 'ad':
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
elif domain_settings.mode == 'member':
# TODO
pass
|
78705f598e7e3325e871bd17ff353a31c71bc399 | opps/articles/forms.py | opps/articles/forms.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm
from .models import Post, Album, Link
class PostAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(ContainerAdminForm):
class Meta:
model = Link
| Extend all admin form to Container Admin Form (json field) | Extend all admin form to Container Admin Form (json field)
| Python | mit | opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
Extend all admin form to Container Admin Form (json field) | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm
from .models import Post, Album, Link
class PostAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(ContainerAdminForm):
class Meta:
model = Link
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
<commit_msg>Extend all admin form to Container Admin Form (json field)<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm
from .models import Post, Album, Link
class PostAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(ContainerAdminForm):
class Meta:
model = Link
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
Extend all admin form to Container Admin Form (json field)#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm
from .models import Post, Album, Link
class PostAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(ContainerAdminForm):
class Meta:
model = Link
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
<commit_msg>Extend all admin form to Container Admin Form (json field)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm
from .models import Post, Album, Link
class PostAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(ContainerAdminForm):
class Meta:
model = Link
|
52c3981b8880085d060f874eb8feace6ac125411 | tests/test_cli_bands.py | tests/test_cli_bands.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import numpy as np
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)
| Replace exact equality assert with isclose in bands cli | Replace exact equality assert with isclose in bands cli
| Python | apache-2.0 | Z2PackDev/TBmodels,Z2PackDev/TBmodels | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
Replace exact equality assert with isclose in bands cli | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import numpy as np
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
<commit_msg>Replace exact equality assert with isclose in bands cli<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import numpy as np
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
Replace exact equality assert with isclose in bands cli#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import numpy as np
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
<commit_msg>Replace exact equality assert with isclose in bands cli<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <greschd@gmx.ch>
import os
import pytest
import tempfile
import numpy as np
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)
|
8b4b5eb2506feed164b69efa66b4cdae159182c3 | tests/test_cli_parse.py | tests/test_cli_parse.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for the 'parse' CLI command."""
import tempfile
import pytest
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
"""Test the 'parse' command with different 'prefix' and 'pos_kind'."""
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| Fix pre-commit issues in the cli_parse tests. | Fix pre-commit issues in the cli_parse tests.
| Python | apache-2.0 | Z2PackDev/TBmodels,Z2PackDev/TBmodels | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
Fix pre-commit issues in the cli_parse tests. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for the 'parse' CLI command."""
import tempfile
import pytest
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
"""Test the 'parse' command with different 'prefix' and 'pos_kind'."""
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
<commit_msg>Fix pre-commit issues in the cli_parse tests.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for the 'parse' CLI command."""
import tempfile
import pytest
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
"""Test the 'parse' command with different 'prefix' and 'pos_kind'."""
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
Fix pre-commit issues in the cli_parse tests.#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for the 'parse' CLI command."""
import tempfile
import pytest
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
"""Test the 'parse' command with different 'prefix' and 'pos_kind'."""
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
<commit_msg>Fix pre-commit issues in the cli_parse tests.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for the 'parse' CLI command."""
import tempfile
import pytest
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
"""Test the 'parse' command with different 'prefix' and 'pos_kind'."""
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
|
afb400e16c1335531f259218a8b9937de48644e9 | polyaxon/checks/streams.py | polyaxon/checks/streams.py | from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request(get_settings_ws_api_url(), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
| from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request('{}/_health'.format(get_settings_ws_api_url()), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
| Update stream health health api url | Update stream health health api url
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request(get_settings_ws_api_url(), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
Update stream health health api url | from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request('{}/_health'.format(get_settings_ws_api_url()), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
| <commit_before>from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request(get_settings_ws_api_url(), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
<commit_msg>Update stream health health api url<commit_after> | from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request('{}/_health'.format(get_settings_ws_api_url()), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
| from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request(get_settings_ws_api_url(), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
Update stream health health api urlfrom checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request('{}/_health'.format(get_settings_ws_api_url()), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
| <commit_before>from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request(get_settings_ws_api_url(), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
<commit_msg>Update stream health health api url<commit_after>from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request('{}/_health'.format(get_settings_ws_api_url()), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
|
edb04d8e0ae03c9244b7d934fd713efbb94d5a58 | opps/api/urls.py | opps/api/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post, Album, Link
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
_api.register(Album())
_api.register(Link())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
| Add api url to album and link | Add api url to album and link
| Python | mit | williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
Add api url to album and link | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post, Album, Link
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
_api.register(Album())
_api.register(Link())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
<commit_msg>Add api url to album and link<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post, Album, Link
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
_api.register(Album())
_api.register(Link())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
Add api url to album and link#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post, Album, Link
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
_api.register(Album())
_api.register(Link())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
<commit_msg>Add api url to album and link<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post, Album, Link
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
_api.register(Album())
_api.register(Link())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
|
7b19611d30dfc9091823ae3d960ab2790dfe9cfc | python/blur_human_faces.py | python/blur_human_faces.py | import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| Apply a blur filter automatically for each detected face | Apply a blur filter automatically for each detected face | Python | bsd-2-clause | symisc/pixlab,symisc/pixlab,symisc/pixlab | import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
Apply a blur filter automatically for each detected face | import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| <commit_before>import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
<commit_msg>Apply a blur filter automatically for each detected face<commit_after> | import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
Apply a blur filter automatically for each detected faceimport requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| <commit_before>import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
<commit_msg>Apply a blur filter automatically for each detected face<commit_after>import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
|
74ecf023ef13fdba6378d6b50b3eaeb06b9e0c97 | rebuild_dependant_repos.py | rebuild_dependant_repos.py | import os, sys, re, logging
import requests
from github import Github
logging.basicConfig(level=logging.DEBUG)
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["TAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["TAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile FROM " + image_name
logging.debug("Searching GitHub with query: '%s'", query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
logging.debug("Found %d candidate repositories.", len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
logging.debug("[%d/%d] Triggering CI pipeline for: %s", current_item, len(circleci_project_slugs), slug)
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
| import os, sys, re
import requests
from github import Github
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["AVATAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["AVATAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile " + image_name
print("Searching GitHub with query: '%s'" % query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
print("Found %d candidate repositories." % len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
print("[%d/%d] Triggering CI pipeline for: %s" % (current_item, len(circleci_project_slugs), slug))
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
| Rename env vars & modify query | Rename env vars & modify query
| Python | apache-2.0 | avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox | import os, sys, re, logging
import requests
from github import Github
logging.basicConfig(level=logging.DEBUG)
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["TAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["TAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile FROM " + image_name
logging.debug("Searching GitHub with query: '%s'", query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
logging.debug("Found %d candidate repositories.", len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
logging.debug("[%d/%d] Triggering CI pipeline for: %s", current_item, len(circleci_project_slugs), slug)
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
Rename env vars & modify query | import os, sys, re
import requests
from github import Github
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["AVATAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["AVATAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile " + image_name
print("Searching GitHub with query: '%s'" % query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
print("Found %d candidate repositories." % len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
print("[%d/%d] Triggering CI pipeline for: %s" % (current_item, len(circleci_project_slugs), slug))
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
| <commit_before>import os, sys, re, logging
import requests
from github import Github
logging.basicConfig(level=logging.DEBUG)
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["TAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["TAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile FROM " + image_name
logging.debug("Searching GitHub with query: '%s'", query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
logging.debug("Found %d candidate repositories.", len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
logging.debug("[%d/%d] Triggering CI pipeline for: %s", current_item, len(circleci_project_slugs), slug)
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
<commit_msg>Rename env vars & modify query<commit_after> | import os, sys, re
import requests
from github import Github
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["AVATAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["AVATAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile " + image_name
print("Searching GitHub with query: '%s'" % query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
print("Found %d candidate repositories." % len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
print("[%d/%d] Triggering CI pipeline for: %s" % (current_item, len(circleci_project_slugs), slug))
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
| import os, sys, re, logging
import requests
from github import Github
logging.basicConfig(level=logging.DEBUG)
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["TAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["TAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile FROM " + image_name
logging.debug("Searching GitHub with query: '%s'", query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
logging.debug("Found %d candidate repositories.", len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
logging.debug("[%d/%d] Triggering CI pipeline for: %s", current_item, len(circleci_project_slugs), slug)
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
Rename env vars & modify queryimport os, sys, re
import requests
from github import Github
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["AVATAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["AVATAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile " + image_name
print("Searching GitHub with query: '%s'" % query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
print("Found %d candidate repositories." % len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
print("[%d/%d] Triggering CI pipeline for: %s" % (current_item, len(circleci_project_slugs), slug))
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
| <commit_before>import os, sys, re, logging
import requests
from github import Github
logging.basicConfig(level=logging.DEBUG)
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["TAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["TAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile FROM " + image_name
logging.debug("Searching GitHub with query: '%s'", query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
logging.debug("Found %d candidate repositories.", len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
logging.debug("[%d/%d] Triggering CI pipeline for: %s", current_item, len(circleci_project_slugs), slug)
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
<commit_msg>Rename env vars & modify query<commit_after>import os, sys, re
import requests
from github import Github
CIRCLECI_BASEURL = "https://circleci.com/api/v2"
CIRCLECI_ACCESS_TOKEN = os.environ["AVATAO_CIRCLECI_TOKEN"]
GITHUB_ACCESS_TOKEN = os.environ["AVATAO_GITHUB_TOKEN"]
g = Github(GITHUB_ACCESS_TOKEN)
if len(sys.argv) < 2:
raise AttributeError("The image name is required as the first argument.")
image_name = sys.argv[1]
image_name = re.sub(r"[^a-zA-Z0-9-]", " ", image_name)
query = "org:avatao-content language:Dockerfile " + image_name
print("Searching GitHub with query: '%s'" % query)
code_search = g.search_code(query)
circleci_project_slugs = set()
for result in code_search:
circleci_project_slugs.add(f"gh/{result.repository.organization.login}/{result.repository.name}")
print("Found %d candidate repositories." % len(circleci_project_slugs))
current_item = 1
for slug in circleci_project_slugs:
print("[%d/%d] Triggering CI pipeline for: %s" % (current_item, len(circleci_project_slugs), slug))
requests.post(f"{CIRCLECI_BASEURL}/project/{slug}/pipeline", headers={"Circle-Token": CIRCLECI_ACCESS_TOKEN})
current_item += 1
|
8713f44fbd35f012ac7e01a64cffcfdf846fee9f | Lib/test/test_importlib/__init__.py | Lib/test/test_importlib/__init__.py | import os
import sys
from .. import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
    """Discover every test under this package's directory and run it via
    support.run_unittest()."""
    here = os.path.dirname(__file__)
    root = os.path.dirname(os.path.dirname(here))
    loader = unittest.TestLoader()
    support.run_unittest(loader.discover(here, top_level_dir=root))
| import os
import sys
from test import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
    """Run all tests discovered under this package's directory."""
    package_dir = os.path.dirname(__file__)
    top_level = os.path.dirname(os.path.dirname(package_dir))
    discovered = unittest.TestLoader().discover(package_dir,
                                                top_level_dir=top_level)
    support.run_unittest(discovered)
| Remove a relative import that escaped test.test_importlib. | Remove a relative import that escaped test.test_importlib.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | import os
import sys
from .. import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
    """Discover and run every test module under this package's directory."""
    here = os.path.dirname(__file__)
    project_root = os.path.dirname(os.path.dirname(here))
    loader = unittest.TestLoader()
    support.run_unittest(loader.discover(here, top_level_dir=project_root))
Remove a relative import that escaped test.test_importlib. | import os
import sys
from test import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
| <commit_before>import os
import sys
from .. import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
<commit_msg>Remove a relative import that escaped test.test_importlib.<commit_after> | import os
import sys
from test import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
| import os
import sys
from .. import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
Remove a relative import that escaped test.test_importlib.import os
import sys
from test import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
| <commit_before>import os
import sys
from .. import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
<commit_msg>Remove a relative import that escaped test.test_importlib.<commit_after>import os
import sys
from test import support
import unittest
def test_suite(package=__package__, directory=os.path.dirname(__file__)):
suite = unittest.TestSuite()
for name in os.listdir(directory):
if name.startswith(('.', '__')):
continue
path = os.path.join(directory, name)
if (os.path.isfile(path) and name.startswith('test_') and
name.endswith('.py')):
submodule_name = os.path.splitext(name)[0]
module_name = "{0}.{1}".format(package, submodule_name)
__import__(module_name, level=0)
module_tests = unittest.findTestCases(sys.modules[module_name])
suite.addTest(module_tests)
elif os.path.isdir(path):
package_name = "{0}.{1}".format(package, name)
__import__(package_name, level=0)
package_tests = getattr(sys.modules[package_name], 'test_suite')()
suite.addTest(package_tests)
else:
continue
return suite
def test_main():
start_dir = os.path.dirname(__file__)
top_dir = os.path.dirname(os.path.dirname(start_dir))
test_loader = unittest.TestLoader()
support.run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir))
|
d98cdb7eae40b5bb11b5d1fc0eacc35ef6bf310d | wye/reports/views.py | wye/reports/views.py | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
    """Staff-only activity report covering the trailing ``days`` days.

    Summarises organisations created, workshops completed (with feedback
    pending or done), profiles registered, and total workshop participants
    since ``now - days``.

    :param request: current HttpRequest; user must be staff.
    :param days: look-back window in days (string or int from the URL).
    """
    if not request.user.is_staff:
        # NOTE(review): returning "" from a Django view is not a valid
        # HttpResponse; consider HttpResponseForbidden — confirm intent.
        return ""
    since = datetime.datetime.now() - datetime.timedelta(days=int(days))
    organisations = Organisation.objects.filter(
        active=True).filter(created_at__gte=since)
    # Completed (or feedback-pending) workshops inside the window, ordered
    # chronologically for display — matches the updated copy of this view.
    workshops = Workshop.objects.filter(
        is_active=True).filter(
        expected_date__gte=since).filter(
        expected_date__lt=datetime.datetime.now()).filter(
        status__in=[WorkshopStatus.COMPLETED,
                    WorkshopStatus.FEEDBACK_PENDING]).order_by('expected_date')
    profiles = Profile.objects.filter(user__date_joined__gte=since)
    no_of_participants = sum(w.no_of_participants for w in workshops)
    # Debug print() and the dead trailing Workshop re-query were removed.
    context_dict = {
        'organisations': organisations,
        'workshops': workshops,
        'profiles': profiles,
        'no_of_participants': no_of_participants,
        'date': since,
    }
    return render(request, 'reports/index.html', context_dict)
| from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
    """Staff-only activity report covering the trailing ``days`` days.

    Summarises organisations created, workshops completed (with feedback
    pending or done), profiles registered, and total workshop participants
    since ``now - days``.

    :param request: current HttpRequest; user must be staff.
    :param days: look-back window in days (string or int from the URL).
    """
    if not request.user.is_staff:
        # NOTE(review): returning "" from a Django view is not a valid
        # HttpResponse; consider HttpResponseForbidden — confirm intent.
        return ""
    since = datetime.datetime.now() - datetime.timedelta(days=int(days))
    organisations = Organisation.objects.filter(
        active=True).filter(created_at__gte=since)
    # Completed (or feedback-pending) workshops inside the window, in
    # chronological order for display.
    workshops = Workshop.objects.filter(
        is_active=True).filter(
        expected_date__gte=since).filter(
        expected_date__lt=datetime.datetime.now()).filter(
        status__in=[WorkshopStatus.COMPLETED,
                    WorkshopStatus.FEEDBACK_PENDING]).order_by('expected_date')
    profiles = Profile.objects.filter(user__date_joined__gte=since)
    no_of_participants = sum(w.no_of_participants for w in workshops)
    # Debug print() and the dead trailing Workshop re-query were removed.
    context_dict = {
        'organisations': organisations,
        'workshops': workshops,
        'profiles': profiles,
        'no_of_participants': no_of_participants,
        'date': since,
    }
    return render(request, 'reports/index.html', context_dict)
| Order filter for report page | Order filter for report page
| Python | mit | pythonindia/wye,pythonindia/wye,pythonindia/wye,pythonindia/wye | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING])
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
Order filter for report page | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING]).order_by('expected_date')
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
| <commit_before>from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING])
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
<commit_msg>Order filter for report page<commit_after> | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING]).order_by('expected_date')
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
| from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING])
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
Order filter for report pagefrom django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING]).order_by('expected_date')
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
| <commit_before>from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING])
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
<commit_msg>Order filter for report page<commit_after>from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from wye.organisations.models import Organisation
from wye.workshops.models import Workshop
from wye.profiles.models import Profile
import datetime
from wye.base.constants import WorkshopStatus
@login_required
def index(request, days):
print(request.user.is_staff)
if not request.user.is_staff:
return ""
d = datetime.datetime.now() - datetime.timedelta(days=int(days))
organisations = Organisation.objects.filter(
active=True).filter(created_at__gte=d)
workshops = Workshop.objects.filter(
is_active=True).filter(
expected_date__gte=d).filter(
expected_date__lt=datetime.datetime.now()).filter(
status__in=[WorkshopStatus.COMPLETED,
WorkshopStatus.FEEDBACK_PENDING]).order_by('expected_date')
profiles = Profile.objects.filter(user__date_joined__gte=d)
no_of_participants = sum([w.no_of_participants for w in workshops])
template_name = 'reports/index.html'
context_dict = {}
context_dict['organisations'] = organisations
context_dict['workshops'] = workshops
context_dict['profiles'] = profiles
context_dict['no_of_participants'] = no_of_participants
context_dict['date'] = d
workshops = Workshop.objects.filter(
is_active=True)
return render(request, template_name, context_dict)
|
10f7938e37180c0cb3b701223cf6d1855e7d8f93 | watchdog_kj_kultura/main/models.py | watchdog_kj_kultura/main/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
    """Queryset hook for ``Settings``; no custom filters defined yet."""
class Settings(TimeStampedModel):
    """Per-site configuration: one row per Django ``Site``.

    ``@python_2_unicode_compatible`` was removed: under Python 2 that
    decorator raises ValueError for classes that do not define ``__str__``,
    and this class defines none.
    """

    # The Site this configuration belongs to (one-to-one).
    site = models.OneToOneField(Site, verbose_name=_("Site"))
    # Rich-text body rendered on the home page.
    home_content = HTMLField(verbose_name=_("Content of home page"))

    objects = SettingsQuerySet.as_manager()

    class Meta:
        verbose_name = _("Settings")
        verbose_name_plural = _("Settings")
        ordering = ['created', ]
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
    """Queryset hook for ``Settings``; no custom filters defined yet."""
class Settings(TimeStampedModel):
    """Per-site configuration record, one per Django ``Site``."""

    # The Site this configuration belongs to (one-to-one).
    site = models.OneToOneField(Site, verbose_name=_("Site"))
    # Rich-text body rendered on the home page.
    home_content = HTMLField(verbose_name=_("Content of home page"))

    objects = SettingsQuerySet.as_manager()

    class Meta:
        verbose_name = _("Settings")
        verbose_name_plural = _("Settings")
        ordering = ['created', ]
| Drop python_2_unicode_compatible for Settings, fix docs build on rtfd | Drop python_2_unicode_compatible for Settings, fix docs build on rtfd
| Python | mit | watchdogpolska/watchdog-kj-kultura,watchdogpolska/watchdog-kj-kultura,watchdogpolska/watchdog-kj-kultura | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
@python_2_unicode_compatible
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
Drop python_2_unicode_compatible for Settings, fix docs build on rtfd | from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
| <commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
@python_2_unicode_compatible
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
<commit_msg>Drop python_2_unicode_compatible for Settings, fix docs build on rtfd<commit_after> | from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
@python_2_unicode_compatible
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
Drop python_2_unicode_compatible for Settings, fix docs build on rtfdfrom django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
| <commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
@python_2_unicode_compatible
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
<commit_msg>Drop python_2_unicode_compatible for Settings, fix docs build on rtfd<commit_after>from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
from tinymce.models import HTMLField
from django.contrib.sites.models import Site
class SettingsQuerySet(models.QuerySet):
pass
class Settings(TimeStampedModel):
site = models.OneToOneField(Site, verbose_name=_("Site"))
home_content = HTMLField(verbose_name=_("Content of home page"))
objects = SettingsQuerySet.as_manager()
class Meta:
verbose_name = _("Settings")
verbose_name_plural = _("Settings")
ordering = ['created', ]
|
8dcb778c62c3c6722e2f6dabfd97f6f75c349e62 | celery_cgi.py | celery_cgi.py | import os
import logging

from celery import Celery

from temp_config.set_environment import DeployEnv

# Load deployment-specific environment variables before reading them below.
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()

redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')

# Task modules Celery registers at start-up.
celery_tasks = [
    'hms_flask.modules.hms_controller',
    'pram_flask.tasks'
]

# Concatenation (not formatting) is kept so a missing env var still fails fast.
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)

celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    CELERY_IGNORE_RESULT=True,
    CELERY_TRACK_STARTED=True,
    worker_max_memory_per_child=50000
)
| import os
import logging

from celery import Celery

from temp_config.set_environment import DeployEnv

# Load deployment-specific environment variables before reading them below.
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()

redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')

# Task modules Celery registers at start-up.
celery_tasks = [
    'hms_flask.modules.hms_controller',
    'pram_flask.tasks'
]

# Concatenation (not formatting) is kept so a missing env var still fails fast.
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)

celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
    CELERY_ACCEPT_CONTENT=['json'],
    CELERY_TASK_SERIALIZER='json',
    CELERY_RESULT_SERIALIZER='json',
    CELERY_IGNORE_RESULT=True,
    CELERY_TRACK_STARTED=True,
    worker_max_tasks_per_child=1,
    worker_max_memory_per_child=50000
)
| Set celery max tasks child to 1 | Set celery max tasks child to 1 | Python | unlicense | puruckertom/ubertool_ecorest,quanted/ubertool_ecorest,puruckertom/ubertool_ecorest,quanted/ubertool_ecorest,quanted/ubertool_ecorest,puruckertom/ubertool_ecorest,puruckertom/ubertool_ecorest,quanted/ubertool_ecorest | import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_memory_per_child = 50000
)
Set celery max tasks child to 1 | import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_tasks_per_child = 1,
worker_max_memory_per_child = 50000
)
| <commit_before>import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_memory_per_child = 50000
)
<commit_msg>Set celery max tasks child to 1<commit_after> | import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_tasks_per_child = 1,
worker_max_memory_per_child = 50000
)
| import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_memory_per_child = 50000
)
Set celery max tasks child to 1import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_tasks_per_child = 1,
worker_max_memory_per_child = 50000
)
| <commit_before>import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_memory_per_child = 50000
)
<commit_msg>Set celery max tasks child to 1<commit_after>import os
import logging
from celery import Celery
from temp_config.set_environment import DeployEnv
runtime_env = DeployEnv()
runtime_env.load_deployment_environment()
redis_server = os.environ.get('REDIS_HOSTNAME')
redis_port = os.environ.get('REDIS_PORT')
celery_tasks = [
'hms_flask.modules.hms_controller',
'pram_flask.tasks'
]
redis = 'redis://' + redis_server + ':' + redis_port + '/0'
logging.info("Celery connecting to redis server: " + redis)
celery = Celery('flask_qed', broker=redis, backend=redis, include=celery_tasks)
celery.conf.update(
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json',
CELERY_IGNORE_RESULT=True,
CELERY_TRACK_STARTED=True,
worker_max_tasks_per_child = 1,
worker_max_memory_per_child = 50000
)
|
d84a4efcf880bb668b2721af3f4ce18220e8baab | xvistaprof/reader.py | xvistaprof/reader.py | #!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
| #!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
| Use np.genfromtext to handle missing values | Use np.genfromtext to handle missing values
| Python | bsd-2-clause | jonathansick/xvistaprof | #!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
Use np.genfromtext to handle missing values | #!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
<commit_msg>Use np.genfromtext to handle missing values<commit_after> | #!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
| #!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
Use np.genfromtext to handle missing values#!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.loadtxt(filename, dtype=np.dtype(dt), skiprows=15)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
<commit_msg>Use np.genfromtext to handle missing values<commit_after>#!/usr/bin/env python
# encoding: utf-8
"""
Reader for XVISTA .prof tables.
"""
import numpy as np
from astropy.table import Table
from astropy.io import registry
def xvista_table_reader(filename):
dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float),
('ELL', np.float), ('PA', np.float), ('EMAG', np.float),
('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float),
('YC', np.float), ('FRACONT', np.float), ('A1', np.float),
('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)]
data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15,
missing_values='*', filling_values=np.nan)
return Table(data)
registry.register_reader('xvistaprof', Table, xvista_table_reader)
|
3b5b3afbc66f60df45f0458ffdd0d37b9a7c50d0 | ptoolbox/tags.py | ptoolbox/tags.py | # -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
| # -*- coding: utf-8 -*-
import struct
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def jpeg_size(path):
"""Get image size.
Structure of JPEG file is:
ffd8 [ffXX SSSS DD DD ...] [ffYY SSSS DDDD ...] (S is 16bit size, D the data)
We look for the SOF0 header 0xffc0; its structure is
[ffc0 SSSS PPHH HHWW ...] where PP is 8bit precision, HHHH 16bit height, WWWW width
"""
with open(path, 'rb') as f:
_, header_type, size = struct.unpack('>HHH', f.read(6))
while header_type != 0xffc0:
f.seek(size - 2, 1)
header_type, size = struct.unpack('>HH', f.read(4))
bpi, height, width = struct.unpack('>BHH', f.read(5))
return width, height
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
| Add homemade fast width/height reader for JPEG files | Add homemade fast width/height reader for JPEG files
| Python | mit | vperron/picasa-toolbox | # -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
Add homemade fast width/height reader for JPEG files | # -*- coding: utf-8 -*-
import struct
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def jpeg_size(path):
"""Get image size.
Structure of JPEG file is:
ffd8 [ffXX SSSS DD DD ...] [ffYY SSSS DDDD ...] (S is 16bit size, D the data)
We look for the SOF0 header 0xffc0; its structure is
[ffc0 SSSS PPHH HHWW ...] where PP is 8bit precision, HHHH 16bit height, WWWW width
"""
with open(path, 'rb') as f:
_, header_type, size = struct.unpack('>HHH', f.read(6))
while header_type != 0xffc0:
f.seek(size - 2, 1)
header_type, size = struct.unpack('>HH', f.read(4))
bpi, height, width = struct.unpack('>BHH', f.read(5))
return width, height
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
| <commit_before># -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
<commit_msg>Add homemade fast width/height reader for JPEG files<commit_after> | # -*- coding: utf-8 -*-
import struct
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def jpeg_size(path):
"""Get image size.
Structure of JPEG file is:
ffd8 [ffXX SSSS DD DD ...] [ffYY SSSS DDDD ...] (S is 16bit size, D the data)
We look for the SOF0 header 0xffc0; its structure is
[ffc0 SSSS PPHH HHWW ...] where PP is 8bit precision, HHHH 16bit height, WWWW width
"""
with open(path, 'rb') as f:
_, header_type, size = struct.unpack('>HHH', f.read(6))
while header_type != 0xffc0:
f.seek(size - 2, 1)
header_type, size = struct.unpack('>HH', f.read(4))
bpi, height, width = struct.unpack('>BHH', f.read(5))
return width, height
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
| # -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
Add homemade fast width/height reader for JPEG files# -*- coding: utf-8 -*-
import struct
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def jpeg_size(path):
"""Get image size.
Structure of JPEG file is:
ffd8 [ffXX SSSS DD DD ...] [ffYY SSSS DDDD ...] (S is 16bit size, D the data)
We look for the SOF0 header 0xffc0; its structure is
[ffc0 SSSS PPHH HHWW ...] where PP is 8bit precision, HHHH 16bit height, WWWW width
"""
with open(path, 'rb') as f:
_, header_type, size = struct.unpack('>HHH', f.read(6))
while header_type != 0xffc0:
f.seek(size - 2, 1)
header_type, size = struct.unpack('>HH', f.read(4))
bpi, height, width = struct.unpack('>BHH', f.read(5))
return width, height
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
| <commit_before># -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
<commit_msg>Add homemade fast width/height reader for JPEG files<commit_after># -*- coding: utf-8 -*-
import struct
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def jpeg_size(path):
"""Get image size.
Structure of JPEG file is:
ffd8 [ffXX SSSS DD DD ...] [ffYY SSSS DDDD ...] (S is 16bit size, D the data)
We look for the SOF0 header 0xffc0; its structure is
[ffc0 SSSS PPHH HHWW ...] where PP is 8bit precision, HHHH 16bit height, WWWW width
"""
with open(path, 'rb') as f:
_, header_type, size = struct.unpack('>HHH', f.read(6))
while header_type != 0xffc0:
f.seek(size - 2, 1)
header_type, size = struct.unpack('>HH', f.read(4))
bpi, height, width = struct.unpack('>BHH', f.read(5))
return width, height
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
c46e472755c7b7dd450626e136f31a29ca9a5321 | rbtools/utils/users.py | rbtools/utils/users.py | from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.links.user.title
| from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
return None
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
user = get_user(api_client, api_root, auth_required)
if user:
return user.username
return None
| Fix a regression in accessing the username for the session. | Fix a regression in accessing the username for the session.
My previous optimization to fetching the user resource along with the
session broke the `get_username()` function, which attempted to follow a
now non-existent link. It's been updated to get the expanded user
resource instead and access the username from that.
Testing Done:
Ran `rbt status`. Before, it would crash with an attribute error. After,
it showed me a list of my open review requests.
Reviewed at https://reviews.reviewboard.org/r/6900/
| Python | mit | reviewboard/rbtools,halvorlu/rbtools,beol/rbtools,datjwu/rbtools,davidt/rbtools,datjwu/rbtools,davidt/rbtools,reviewboard/rbtools,haosdent/rbtools,halvorlu/rbtools,haosdent/rbtools,haosdent/rbtools,reviewboard/rbtools,beol/rbtools,beol/rbtools,datjwu/rbtools,halvorlu/rbtools,davidt/rbtools | from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.links.user.title
Fix a regression in accessing the username for the session.
My previous optimization to fetching the user resource along with the
session broke the `get_username()` function, which attempted to follow a
now non-existent link. It's been updated to get the expanded user
resource instead and access the username from that.
Testing Done:
Ran `rbt status`. Before, it would crash with an attribute error. After,
it showed me a list of my open review requests.
Reviewed at https://reviews.reviewboard.org/r/6900/ | from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
return None
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
user = get_user(api_client, api_root, auth_required)
if user:
return user.username
return None
| <commit_before>from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.links.user.title
<commit_msg>Fix a regression in accessing the username for the session.
My previous optimization to fetching the user resource along with the
session broke the `get_username()` function, which attempted to follow a
now non-existent link. It's been updated to get the expanded user
resource instead and access the username from that.
Testing Done:
Ran `rbt status`. Before, it would crash with an attribute error. After,
it showed me a list of my open review requests.
Reviewed at https://reviews.reviewboard.org/r/6900/<commit_after> | from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
return None
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
user = get_user(api_client, api_root, auth_required)
if user:
return user.username
return None
| from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.links.user.title
Fix a regression in accessing the username for the session.
My previous optimization to fetching the user resource along with the
session broke the `get_username()` function, which attempted to follow a
now non-existent link. It's been updated to get the expanded user
resource instead and access the username from that.
Testing Done:
Ran `rbt status`. Before, it would crash with an attribute error. After,
it showed me a list of my open review requests.
Reviewed at https://reviews.reviewboard.org/r/6900/from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
return None
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
user = get_user(api_client, api_root, auth_required)
if user:
return user.username
return None
| <commit_before>from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.links.user.title
<commit_msg>Fix a regression in accessing the username for the session.
My previous optimization to fetching the user resource along with the
session broke the `get_username()` function, which attempted to follow a
now non-existent link. It's been updated to get the expanded user
resource instead and access the username from that.
Testing Done:
Ran `rbt status`. Before, it would crash with an attribute error. After,
it showed me a list of my open review requests.
Reviewed at https://reviews.reviewboard.org/r/6900/<commit_after>from __future__ import unicode_literals
import getpass
import logging
import sys
from six.moves import input
from rbtools.api.errors import AuthorizationError
from rbtools.commands import CommandError
def get_authenticated_session(api_client, api_root, auth_required=False):
"""Return an authenticated session.
None will be returned if the user is not authenticated, unless the
'auth_required' parameter is True, in which case the user will be prompted
to login.
"""
session = api_root.get_session(expand='user')
if not session.authenticated:
if not auth_required:
return None
logging.warning('You are not authenticated with the Review Board '
'server at %s, please login.' % api_client.url)
sys.stderr.write('Username: ')
username = input()
password = getpass.getpass(b'Password: ')
api_client.login(username, password)
try:
session = session.get_self()
except AuthorizationError:
raise CommandError('You are not authenticated.')
return session
def get_user(api_client, api_root, auth_required=False):
"""Return the user resource for the current session."""
session = get_authenticated_session(api_client, api_root, auth_required)
if session:
return session.user
return None
def get_username(api_client, api_root, auth_required=False):
"""Return the username for the current session."""
user = get_user(api_client, api_root, auth_required)
if user:
return user.username
return None
|
0cc7fbea3952485e8274c8df1b223fc791181035 | ona_migration_script/migrate_toilets.py | ona_migration_script/migrate_toilets.py | import argparse
from ona import OnaApiClient
def generate_location(lat, lon):
return ' '.join([str(lat), str(lon)])
CONVERSIONS = {
'code': 'toilet_code', 'section': 'toilet_section',
'cluster': 'toilet_cluster'}
ADDITIONS = {
'toilet_location': (generate_location, ['lat', 'lon'])
}
DEFAULTS = {
'toilet_state': 'no_issue', 'toilet_issue': '', 'toilet_issue_date': ''}
parser = argparse.ArgumentParser(description='Migrate submissions')
parser.add_argument(
'url', type=str,
help='The full URL to get the JSON toilet information from')
parser.add_argument(
'to_id', type=str,
help="The id (number) of the form to migrate submissions to")
parser.add_argument(
'username', type=str, help='The Ona username used to log in')
parser.add_argument(
'password', type=str, help='The Ona password used to log in')
args = parser.parse_args()
client = OnaApiClient(args.username, args.password)
def get_toilet_info_from_django():
url = args.url
headers = {
"Content-type": "application/json; charset=utf-8"
}
r = client.session.request(
'GET', url, headers=headers)
r.raise_for_status()
return r.json()
def get_fields_from_form(formid):
form = client.get_form_information(formid)
fields = []
for child in form.get('children'):
fields.append(child.get('name'))
return fields
toilet_data = get_toilet_info_from_django()
to_fields = get_fields_from_form(args.to_id)
for toilet in toilet_data:
new_toilet = toilet.copy()
# Add fields
for field, (function, arguments) in ADDITIONS.iteritems():
arguments = [toilet[arg] for arg in arguments]
new_toilet[field] = function(*arguments)
# Migrate fields
for field in toilet:
if field in CONVERSIONS:
new_toilet[CONVERSIONS[field]] = toilet[field]
# Remove deleted fields
if field not in to_fields:
del new_toilet[field]
# Add missing fields
for field in to_fields:
if field not in new_toilet:
new_toilet[field] = DEFAULTS.get(field, None)
# Post submission to new form
form_id_string = client.get_form(args.to_id)['id_string']
try:
client.submission({
"id": form_id_string,
"submission": new_toilet,
})
except:
print "Error sending form %s. Submission: " % form_id_string
print new_toilet
| Complete migrate from django to toilets script | Complete migrate from django to toilets script
| Python | bsd-3-clause | praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js | Complete migrate from django to toilets script | import argparse
from ona import OnaApiClient
def generate_location(lat, lon):
return ' '.join([str(lat), str(lon)])
CONVERSIONS = {
'code': 'toilet_code', 'section': 'toilet_section',
'cluster': 'toilet_cluster'}
ADDITIONS = {
'toilet_location': (generate_location, ['lat', 'lon'])
}
DEFAULTS = {
'toilet_state': 'no_issue', 'toilet_issue': '', 'toilet_issue_date': ''}
parser = argparse.ArgumentParser(description='Migrate submissions')
parser.add_argument(
'url', type=str,
help='The full URL to get the JSON toilet information from')
parser.add_argument(
'to_id', type=str,
help="The id (number) of the form to migrate submissions to")
parser.add_argument(
'username', type=str, help='The Ona username used to log in')
parser.add_argument(
'password', type=str, help='The Ona password used to log in')
args = parser.parse_args()
client = OnaApiClient(args.username, args.password)
def get_toilet_info_from_django():
url = args.url
headers = {
"Content-type": "application/json; charset=utf-8"
}
r = client.session.request(
'GET', url, headers=headers)
r.raise_for_status()
return r.json()
def get_fields_from_form(formid):
form = client.get_form_information(formid)
fields = []
for child in form.get('children'):
fields.append(child.get('name'))
return fields
toilet_data = get_toilet_info_from_django()
to_fields = get_fields_from_form(args.to_id)
for toilet in toilet_data:
new_toilet = toilet.copy()
# Add fields
for field, (function, arguments) in ADDITIONS.iteritems():
arguments = [toilet[arg] for arg in arguments]
new_toilet[field] = function(*arguments)
# Migrate fields
for field in toilet:
if field in CONVERSIONS:
new_toilet[CONVERSIONS[field]] = toilet[field]
# Remove deleted fields
if field not in to_fields:
del new_toilet[field]
# Add missing fields
for field in to_fields:
if field not in new_toilet:
new_toilet[field] = DEFAULTS.get(field, None)
# Post submission to new form
form_id_string = client.get_form(args.to_id)['id_string']
try:
client.submission({
"id": form_id_string,
"submission": new_toilet,
})
except:
print "Error sending form %s. Submission: " % form_id_string
print new_toilet
| <commit_before><commit_msg>Complete migrate from django to toilets script<commit_after> | import argparse
from ona import OnaApiClient
def generate_location(lat, lon):
return ' '.join([str(lat), str(lon)])
CONVERSIONS = {
'code': 'toilet_code', 'section': 'toilet_section',
'cluster': 'toilet_cluster'}
ADDITIONS = {
'toilet_location': (generate_location, ['lat', 'lon'])
}
DEFAULTS = {
'toilet_state': 'no_issue', 'toilet_issue': '', 'toilet_issue_date': ''}
parser = argparse.ArgumentParser(description='Migrate submissions')
parser.add_argument(
'url', type=str,
help='The full URL to get the JSON toilet information from')
parser.add_argument(
'to_id', type=str,
help="The id (number) of the form to migrate submissions to")
parser.add_argument(
'username', type=str, help='The Ona username used to log in')
parser.add_argument(
'password', type=str, help='The Ona password used to log in')
args = parser.parse_args()
client = OnaApiClient(args.username, args.password)
def get_toilet_info_from_django():
url = args.url
headers = {
"Content-type": "application/json; charset=utf-8"
}
r = client.session.request(
'GET', url, headers=headers)
r.raise_for_status()
return r.json()
def get_fields_from_form(formid):
form = client.get_form_information(formid)
fields = []
for child in form.get('children'):
fields.append(child.get('name'))
return fields
toilet_data = get_toilet_info_from_django()
to_fields = get_fields_from_form(args.to_id)
for toilet in toilet_data:
new_toilet = toilet.copy()
# Add fields
for field, (function, arguments) in ADDITIONS.iteritems():
arguments = [toilet[arg] for arg in arguments]
new_toilet[field] = function(*arguments)
# Migrate fields
for field in toilet:
if field in CONVERSIONS:
new_toilet[CONVERSIONS[field]] = toilet[field]
# Remove deleted fields
if field not in to_fields:
del new_toilet[field]
# Add missing fields
for field in to_fields:
if field not in new_toilet:
new_toilet[field] = DEFAULTS.get(field, None)
# Post submission to new form
form_id_string = client.get_form(args.to_id)['id_string']
try:
client.submission({
"id": form_id_string,
"submission": new_toilet,
})
except:
print "Error sending form %s. Submission: " % form_id_string
print new_toilet
| Complete migrate from django to toilets scriptimport argparse
from ona import OnaApiClient
def generate_location(lat, lon):
return ' '.join([str(lat), str(lon)])
CONVERSIONS = {
'code': 'toilet_code', 'section': 'toilet_section',
'cluster': 'toilet_cluster'}
ADDITIONS = {
'toilet_location': (generate_location, ['lat', 'lon'])
}
DEFAULTS = {
'toilet_state': 'no_issue', 'toilet_issue': '', 'toilet_issue_date': ''}
parser = argparse.ArgumentParser(description='Migrate submissions')
parser.add_argument(
'url', type=str,
help='The full URL to get the JSON toilet information from')
parser.add_argument(
'to_id', type=str,
help="The id (number) of the form to migrate submissions to")
parser.add_argument(
'username', type=str, help='The Ona username used to log in')
parser.add_argument(
'password', type=str, help='The Ona password used to log in')
args = parser.parse_args()
client = OnaApiClient(args.username, args.password)
def get_toilet_info_from_django():
url = args.url
headers = {
"Content-type": "application/json; charset=utf-8"
}
r = client.session.request(
'GET', url, headers=headers)
r.raise_for_status()
return r.json()
def get_fields_from_form(formid):
form = client.get_form_information(formid)
fields = []
for child in form.get('children'):
fields.append(child.get('name'))
return fields
toilet_data = get_toilet_info_from_django()
to_fields = get_fields_from_form(args.to_id)
for toilet in toilet_data:
new_toilet = toilet.copy()
# Add fields
for field, (function, arguments) in ADDITIONS.iteritems():
arguments = [toilet[arg] for arg in arguments]
new_toilet[field] = function(*arguments)
# Migrate fields
for field in toilet:
if field in CONVERSIONS:
new_toilet[CONVERSIONS[field]] = toilet[field]
# Remove deleted fields
if field not in to_fields:
del new_toilet[field]
# Add missing fields
for field in to_fields:
if field not in new_toilet:
new_toilet[field] = DEFAULTS.get(field, None)
# Post submission to new form
form_id_string = client.get_form(args.to_id)['id_string']
try:
client.submission({
"id": form_id_string,
"submission": new_toilet,
})
except:
print "Error sending form %s. Submission: " % form_id_string
print new_toilet
| <commit_before><commit_msg>Complete migrate from django to toilets script<commit_after>import argparse
from ona import OnaApiClient
def generate_location(lat, lon):
    """Render a latitude/longitude pair as one space-separated string."""
    return str(lat) + ' ' + str(lon)
# Old (Django) field name -> new Ona form field name.
CONVERSIONS = {
    'code': 'toilet_code', 'section': 'toilet_section',
    'cluster': 'toilet_cluster'}
# Fields synthesized from existing values: name -> (builder, source fields).
ADDITIONS = {
    'toilet_location': (generate_location, ['lat', 'lon'])
}
# Values used for target-form fields the source data does not provide.
DEFAULTS = {
    'toilet_state': 'no_issue', 'toilet_issue': '', 'toilet_issue_date': ''}
# Command-line interface: source URL, target form id, Ona credentials.
parser = argparse.ArgumentParser(description='Migrate submissions')
parser.add_argument(
    'url', type=str,
    help='The full URL to get the JSON toilet information from')
parser.add_argument(
    'to_id', type=str,
    help="The id (number) of the form to migrate submissions to")
parser.add_argument(
    'username', type=str, help='The Ona username used to log in')
parser.add_argument(
    'password', type=str, help='The Ona password used to log in')
args = parser.parse_args()
# Authenticated Ona API client used for all requests below.
client = OnaApiClient(args.username, args.password)
def get_toilet_info_from_django():
    """Fetch the toilet records as JSON from the Django endpoint (args.url)."""
    url = args.url
    headers = {
        "Content-type": "application/json; charset=utf-8"
    }
    # Reuse the Ona client's HTTP session for the request.
    r = client.session.request(
        'GET', url, headers=headers)
    # Fail fast on HTTP errors instead of trying to parse an error body.
    r.raise_for_status()
    return r.json()
def get_fields_from_form(formid):
    """Return the names of the top-level fields of the given Ona form."""
    form = client.get_form_information(formid)
    return [child.get('name') for child in form.get('children')]
# Fetch all source records and the target form's field schema once.
toilet_data = get_toilet_info_from_django()
to_fields = get_fields_from_form(args.to_id)
for toilet in toilet_data:
    new_toilet = toilet.copy()
    # Add fields synthesized from the source values (ADDITIONS).
    for field, (function, arguments) in ADDITIONS.iteritems():
        arguments = [toilet[arg] for arg in arguments]
        new_toilet[field] = function(*arguments)
    # Migrate fields renamed between the old and new schema (CONVERSIONS).
    for field in toilet:
        if field in CONVERSIONS:
            new_toilet[CONVERSIONS[field]] = toilet[field]
        # Remove deleted fields (those absent from the target form).
        if field not in to_fields:
            del new_toilet[field]
    # Add missing fields with their defaults (None when no default exists).
    for field in to_fields:
        if field not in new_toilet:
            new_toilet[field] = DEFAULTS.get(field, None)
    # Post submission to new form.
    # NOTE(review): this id_string lookup is loop-invariant and could be
    # hoisted above the loop to save one API call per record.
    form_id_string = client.get_form(args.to_id)['id_string']
    try:
        client.submission({
            "id": form_id_string,
            "submission": new_toilet,
        })
    except:
        # NOTE(review): bare except swallows everything (including
        # KeyboardInterrupt); catching a specific exception class and
        # logging the cause would be safer.
        print "Error sending form %s. Submission: " % form_id_string
        print new_toilet
| |
5f1f1145d4f01f4b30e8782d284feb44781c21ad | tests/cupyx_tests/scipy_tests/special_tests/test_ufunc_dispatch.py | tests/cupyx_tests/scipy_tests/special_tests/test_ufunc_dispatch.py | import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
# Names of all scipy.special members that are NumPy ufuncs.
scipy_ufuncs = {
    f
    for f in scipy.special.__all__
    if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
# Names of all cupyx.scipy.special members that are CuPy ufuncs.
cupyx_scipy_ufuncs = {
    f
    for f in dir(cupyx.scipy.special)
    if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
# Sort the name set: set iteration order differs between processes, and
# pytest-xdist requires every worker to collect the same parametrized test
# IDs in the same order, so an unsorted set breaks parallel runs.
@pytest.mark.parametrize("ufunc", sorted(cupyx_scipy_ufuncs & scipy_ufuncs))
class TestUfunc:
    @testing.numpy_cupy_allclose(atol=1e-5)
    def test_dispatch(self, xp, ufunc):
        """Applying a scipy.special ufunc to arrays from ``xp`` should
        dispatch correctly and return an ``xp.ndarray`` with matching
        values across NumPy and CuPy."""
        ufunc = getattr(scipy.special, ufunc)
        # some ufunc (like sph_harm) do not work with float inputs
        # therefore we retrieve the types from the ufunc itself
        types = ufunc.types[0]
        args = [
            cupy.testing.shaped_random((5,), xp, dtype=types[i])
            for i in range(ufunc.nargs - 1)
        ]
        res = ufunc(*args)
        assert type(res) == xp.ndarray
        return res
| import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", sorted(cupyx_scipy_ufuncs & scipy_ufuncs))
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
| Use sorted on the set to parametrize tests so that pytest-xdist works | Use sorted on the set to parametrize tests so that pytest-xdist works
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", cupyx_scipy_ufuncs & scipy_ufuncs)
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
Use sorted on the set to parametrize tests so that pytest-xdist works | import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", sorted(cupyx_scipy_ufuncs & scipy_ufuncs))
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
| <commit_before>import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", cupyx_scipy_ufuncs & scipy_ufuncs)
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
<commit_msg>Use sorted on the set to parametrize tests so that pytest-xdist works<commit_after> | import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", sorted(cupyx_scipy_ufuncs & scipy_ufuncs))
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
| import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", cupyx_scipy_ufuncs & scipy_ufuncs)
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
Use sorted on the set to parametrize tests so that pytest-xdist worksimport numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", sorted(cupyx_scipy_ufuncs & scipy_ufuncs))
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
| <commit_before>import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", cupyx_scipy_ufuncs & scipy_ufuncs)
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
<commit_msg>Use sorted on the set to parametrize tests so that pytest-xdist works<commit_after>import numpy
import cupy
import scipy.special
import cupyx.scipy.special
from cupy import testing
import pytest
scipy_ufuncs = {
f
for f in scipy.special.__all__
if isinstance(getattr(scipy.special, f), numpy.ufunc)
}
cupyx_scipy_ufuncs = {
f
for f in dir(cupyx.scipy.special)
if isinstance(getattr(cupyx.scipy.special, f), cupy.ufunc)
}
@testing.gpu
@testing.with_requires("scipy")
@pytest.mark.parametrize("ufunc", sorted(cupyx_scipy_ufuncs & scipy_ufuncs))
class TestUfunc:
@testing.numpy_cupy_allclose(atol=1e-5)
def test_dispatch(self, xp, ufunc):
ufunc = getattr(scipy.special, ufunc)
# some ufunc (like sph_harm) do not work with float inputs
# therefore we retrieve the types from the ufunc itself
types = ufunc.types[0]
args = [
cupy.testing.shaped_random((5,), xp, dtype=types[i])
for i in range(ufunc.nargs - 1)
]
res = ufunc(*args)
assert type(res) == xp.ndarray
return res
|
4c124f151c2f8d466840b10e7ed53395b3d587dc | UM/Math/Ray.py | UM/Math/Ray.py | from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
| from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
| Add a convenience method to get a point along a ray | Add a convenience method to get a point along a ray
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium | from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
Add a convenience method to get a point along a ray | from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
| <commit_before>from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
<commit_msg>Add a convenience method to get a point along a ray<commit_after> | from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
| from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
Add a convenience method to get a point along a rayfrom UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
| <commit_before>from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
<commit_msg>Add a convenience method to get a point along a ray<commit_after>from UM.Math.Vector import Vector
class Ray:
    """A ray defined by an origin point and a direction vector."""

    def __init__(self, origin = Vector(), direction = Vector()):
        # NOTE(review): the default Vector() instances are created once at
        # definition time and shared across calls -- harmless only if
        # Vector is immutable; confirm.
        self._origin = origin
        self._direction = direction
        # Precomputed 1.0 / direction, exposed via inverseDirection.
        # NOTE(review): relies on Vector supporting ``float / Vector``;
        # behaviour for zero components depends on Vector -- confirm.
        self._invDirection = 1.0 / direction

    @property
    def origin(self):
        # The ray's start point.
        return self._origin

    @property
    def direction(self):
        # The ray's direction vector (normalization not enforced here).
        return self._direction

    @property
    def inverseDirection(self):
        # Precomputed 1.0 / direction (see __init__).
        return self._invDirection

    def getPointAlongRay(self, distance):
        """Return the point origin + direction * distance."""
        return self._origin + (self._direction * distance)

    def __repr__(self):
        return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
|
8494ece12d8fe27c4c76797083c851a96e054286 | PropensityScoreMatching/__init__.py | PropensityScoreMatching/__init__.py | # -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
pass
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
| # -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
import statsmodels.api as sm
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
link = sm.families.links.logit
family = sm.families.Binomial(link)
reg = sm.GLM(treated, design_matrix, family=family)
fitted_reg = reg.fit()
pscore = fitted_reg.fittedvalues
return pscore
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
| Implement propensity score fitting for logit model | Implement propensity score fitting for logit model
| Python | mit | aegorenkov/PropensityScoreMatching | # -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
pass
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
Implement propensity score fitting for logit model | # -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
import statsmodels.api as sm
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
link = sm.families.links.logit
family = sm.families.Binomial(link)
reg = sm.GLM(treated, design_matrix, family=family)
fitted_reg = reg.fit()
pscore = fitted_reg.fittedvalues
return pscore
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
| <commit_before># -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
pass
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
<commit_msg>Implement propensity score fitting for logit model<commit_after> | # -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
import statsmodels.api as sm
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
link = sm.families.links.logit
family = sm.families.Binomial(link)
reg = sm.GLM(treated, design_matrix, family=family)
fitted_reg = reg.fit()
pscore = fitted_reg.fittedvalues
return pscore
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
| # -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
pass
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
Implement propensity score fitting for logit model# -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
import statsmodels.api as sm
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
link = sm.families.links.logit
family = sm.families.Binomial(link)
reg = sm.GLM(treated, design_matrix, family=family)
fitted_reg = reg.fit()
pscore = fitted_reg.fittedvalues
return pscore
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
| <commit_before># -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
pass
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
<commit_msg>Implement propensity score fitting for logit model<commit_after># -*- coding: utf-8 -*-
"""
Created on Mon May 18 15:09:03 2015
@author: Alexander
"""
import statsmodels.api as sm
class Match(object):
'''
Perform matching algorithm on input data and return a list of indicies
corresponding to matches.
'''
def __init__(self, match_type='neighbor'):
self.match_type = match_type
def match(self, covariates):
pass
class PropensityScoreMatching(object):
'''
Propensity Score Matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self, model='logit'):
self.model = model
def fit(self, treated, design_matrix):
'''
Run logit or probit and return propensity score column
'''
link = sm.families.links.logit
family = sm.families.Binomial(link)
reg = sm.GLM(treated, design_matrix, family=family)
fitted_reg = reg.fit()
pscore = fitted_reg.fittedvalues
return pscore
class MahalanobisMatching(object):
'''
Mahalanobis matching in Python.
Use psmatch2 to confirm accuracy.
'''
def __init__(self):
pass
|
427629ca4cfe231acd8cd4ad54470038ca03c13e | zerver/migrations/0076_userprofile_emojiset.py | zerver/migrations/0076_userprofile_emojiset.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple color emoji'), ('emojione', 'Emojione'), ('google', 'Notoemoji'), ('twitter', 'Twemoji')], default='google', max_length=20),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple style'), ('emojione', 'Emoji One style'), ('google', 'Google style'), ('twitter', 'Twitter style')], default='google', max_length=20),
),
]
| Fix strings in migration 0076. | emoji: Fix strings in migration 0076.
It's arguably a bug that Django puts the value strings into the
migration object, but this was causing test failures.
| Python | apache-2.0 | punchagan/zulip,kou/zulip,synicalsyntax/zulip,vaidap/zulip,brockwhittaker/zulip,j831/zulip,eeshangarg/zulip,jphilipsen05/zulip,jackrzhang/zulip,j831/zulip,shubhamdhama/zulip,brainwane/zulip,verma-varsha/zulip,synicalsyntax/zulip,mahim97/zulip,verma-varsha/zulip,eeshangarg/zulip,showell/zulip,eeshangarg/zulip,andersk/zulip,hackerkid/zulip,dhcrzf/zulip,mahim97/zulip,brockwhittaker/zulip,mahim97/zulip,showell/zulip,vabs22/zulip,zulip/zulip,jphilipsen05/zulip,shubhamdhama/zulip,brainwane/zulip,ryanbackman/zulip,rishig/zulip,mahim97/zulip,Galexrt/zulip,vaidap/zulip,rishig/zulip,punchagan/zulip,ryanbackman/zulip,kou/zulip,synicalsyntax/zulip,punchagan/zulip,jphilipsen05/zulip,rishig/zulip,tommyip/zulip,hackerkid/zulip,amanharitsh123/zulip,kou/zulip,christi3k/zulip,tommyip/zulip,jackrzhang/zulip,mahim97/zulip,tommyip/zulip,hackerkid/zulip,rht/zulip,jackrzhang/zulip,kou/zulip,tommyip/zulip,tommyip/zulip,Galexrt/zulip,amanharitsh123/zulip,timabbott/zulip,showell/zulip,jackrzhang/zulip,shubhamdhama/zulip,tommyip/zulip,rht/zulip,timabbott/zulip,mahim97/zulip,brockwhittaker/zulip,amanharitsh123/zulip,Galexrt/zulip,verma-varsha/zulip,kou/zulip,ryanbackman/zulip,shubhamdhama/zulip,christi3k/zulip,christi3k/zulip,christi3k/zulip,vaidap/zulip,jackrzhang/zulip,ryanbackman/zulip,showell/zulip,rht/zulip,zulip/zulip,brainwane/zulip,andersk/zulip,jphilipsen05/zulip,andersk/zulip,Galexrt/zulip,vabs22/zulip,jackrzhang/zulip,punchagan/zulip,rht/zulip,jrowan/zulip,timabbott/zulip,jrowan/zulip,rishig/zulip,dhcrzf/zulip,andersk/zulip,vabs22/zulip,kou/zulip,hackerkid/zulip,jphilipsen05/zulip,amanharitsh123/zulip,verma-varsha/zulip,brockwhittaker/zulip,j831/zulip,synicalsyntax/zulip,zulip/zulip,punchagan/zulip,verma-varsha/zulip,j831/zulip,jphilipsen05/zulip,vaidap/zulip,rishig/zulip,synicalsyntax/zulip,vabs22/zulip,punchagan/zulip,christi3k/zulip,Galexrt/zulip,Galexrt/zulip,brainwane/zulip,hackerkid/zulip,zulip/zulip,brainwane/zulip,synicalsyntax/zulip,rishig/zulip,j831
/zulip,j831/zulip,christi3k/zulip,ryanbackman/zulip,eeshangarg/zulip,brockwhittaker/zulip,showell/zulip,showell/zulip,dhcrzf/zulip,hackerkid/zulip,tommyip/zulip,amanharitsh123/zulip,showell/zulip,andersk/zulip,timabbott/zulip,kou/zulip,dhcrzf/zulip,brockwhittaker/zulip,vabs22/zulip,andersk/zulip,timabbott/zulip,brainwane/zulip,brainwane/zulip,eeshangarg/zulip,Galexrt/zulip,verma-varsha/zulip,shubhamdhama/zulip,rht/zulip,rht/zulip,synicalsyntax/zulip,jrowan/zulip,rishig/zulip,jrowan/zulip,zulip/zulip,jrowan/zulip,dhcrzf/zulip,rht/zulip,ryanbackman/zulip,zulip/zulip,timabbott/zulip,shubhamdhama/zulip,vaidap/zulip,shubhamdhama/zulip,dhcrzf/zulip,eeshangarg/zulip,punchagan/zulip,andersk/zulip,eeshangarg/zulip,amanharitsh123/zulip,timabbott/zulip,hackerkid/zulip,dhcrzf/zulip,zulip/zulip,jackrzhang/zulip,vabs22/zulip,jrowan/zulip,vaidap/zulip | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple color emoji'), ('emojione', 'Emojione'), ('google', 'Notoemoji'), ('twitter', 'Twemoji')], default='google', max_length=20),
),
]
emoji: Fix strings in migration 0076.
It's arguably a bug that Django puts the value strings into the
migration object, but this was causing test failures. | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple style'), ('emojione', 'Emoji One style'), ('google', 'Google style'), ('twitter', 'Twitter style')], default='google', max_length=20),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple color emoji'), ('emojione', 'Emojione'), ('google', 'Notoemoji'), ('twitter', 'Twemoji')], default='google', max_length=20),
),
]
<commit_msg>emoji: Fix strings in migration 0076.
It's arguably a bug that Django puts the value strings into the
migration object, but this was causing test failures.<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple style'), ('emojione', 'Emoji One style'), ('google', 'Google style'), ('twitter', 'Twitter style')], default='google', max_length=20),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple color emoji'), ('emojione', 'Emojione'), ('google', 'Notoemoji'), ('twitter', 'Twemoji')], default='google', max_length=20),
),
]
emoji: Fix strings in migration 0076.
It's arguably a bug that Django puts the value strings into the
migration object, but this was causing test failures.# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple style'), ('emojione', 'Emoji One style'), ('google', 'Google style'), ('twitter', 'Twitter style')], default='google', max_length=20),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple color emoji'), ('emojione', 'Emojione'), ('google', 'Notoemoji'), ('twitter', 'Twemoji')], default='google', max_length=20),
),
]
<commit_msg>emoji: Fix strings in migration 0076.
It's arguably a bug that Django puts the value strings into the
migration object, but this was causing test failures.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-23 19:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0075_attachment_path_id_unique'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('apple', 'Apple style'), ('emojione', 'Emoji One style'), ('google', 'Google style'), ('twitter', 'Twitter style')], default='google', max_length=20),
),
]
|
eabbd0468e7334dfb5d4866baadb7b4265d8536f | virtool/caches/utils.py | virtool/caches/utils.py | import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"]) | """
Utilities used for working with cache files within analysis workflows.
"""
import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"]) | Update virtool.caches docstrings and typing | Update virtool.caches docstrings and typing
| Python | mit | virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool | import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"])Update virtool.caches docstrings and typing | """
Utilities used for working with cache files within analysis workflows.
"""
import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"]) | <commit_before>import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"])<commit_msg>Update virtool.caches docstrings and typing<commit_after> | """
Utilities used for working with cache files within analysis workflows.
"""
import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"]) | import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"])Update virtool.caches docstrings and typing"""
Utilities used for working with cache files within analysis workflows.
"""
import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"]) | <commit_before>import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"])<commit_msg>Update virtool.caches docstrings and typing<commit_after>"""
Utilities used for working with cache files within analysis workflows.
"""
import os
from typing import Union
import virtool.samples.utils
def join_cache_path(settings: dict, cache_id: str):
"""
Create a cache path string given the application settings and cache id.
:param settings: the application settings
:param cache_id: the id of the cache
:return: a cache path
"""
return os.path.join(settings["data_path"], "caches", cache_id)
def join_cache_read_paths(settings: dict, cache: dict) -> Union[list, None]:
"""
Return a list of read paths for a cache given the application settings and the cache document.
The path list will contain two paths if paired, and one if not.
:param settings: the application settings
:param cache: a cache document
:return: a list of read paths
"""
if not cache:
return None
cache_path = join_cache_path(settings, cache["id"])
return virtool.samples.utils.join_read_paths(cache_path, cache["paired"]) |
3bef86bd3637642587ed15680249c278504fc4fb | pontoon/administration/management/commands/update_projects.py | pontoon/administration/management/commands/update_projects.py |
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
|
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
| Add timestamp and newline to log messages | Add timestamp and newline to log messages
| Python | bsd-3-clause | mathjazz/pontoon,yfdyh000/pontoon,jotes/pontoon,mozilla/pontoon,sudheesh001/pontoon,mathjazz/pontoon,jotes/pontoon,vivekanand1101/pontoon,mastizada/pontoon,jotes/pontoon,m8ttyB/pontoon,Jobava/mirror-pontoon,Jobava/mirror-pontoon,participedia/pontoon,jotes/pontoon,yfdyh000/pontoon,m8ttyB/pontoon,vivekanand1101/pontoon,vivekanand1101/pontoon,sudheesh001/pontoon,sudheesh001/pontoon,Osmose/pontoon,m8ttyB/pontoon,Osmose/pontoon,Osmose/pontoon,participedia/pontoon,sudheesh001/pontoon,Jobava/mirror-pontoon,mastizada/pontoon,Osmose/pontoon,Jobava/mirror-pontoon,mozilla/pontoon,mozilla/pontoon,m8ttyB/pontoon,yfdyh000/pontoon,mathjazz/pontoon,mathjazz/pontoon,mathjazz/pontoon,vivekanand1101/pontoon,mastizada/pontoon,mozilla/pontoon,participedia/pontoon,participedia/pontoon,mozilla/pontoon,mastizada/pontoon,yfdyh000/pontoon |
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
Add timestamp and newline to log messages |
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
| <commit_before>
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
<commit_msg>Add timestamp and newline to log messages<commit_after> |
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
Add timestamp and newline to log messages
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
| <commit_before>
import os
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
self.stdout.write('Successfully updated project "%s"' % project)
except Exception as e:
raise CommandError('UpdateProjectsFromRepositoryError: %s' % unicode(e))
<commit_msg>Add timestamp and newline to log messages<commit_after>
import os
import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from pontoon.administration.views import _update_from_repository
from pontoon.base.models import Project
class Command(BaseCommand):
help = 'Update all projects from their repositories and store changes to the database'
def handle(self, *args, **options):
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
now = datetime.datetime.now()
self.stdout.write('[%s]: Successfully updated project "%s"\n' % (now, project))
except Exception as e:
now = datetime.datetime.now()
raise CommandError('[%s]: UpdateProjectsFromRepositoryError: %s\n' % (now, unicode(e)))
|
f8a6b4d8053a60cfec372d8b91bf294d606055ec | app/admin/routes.py | app/admin/routes.py | from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
| from datetime import datetime
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm, PostForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
@admin.route('/news/post', methods=['GET', 'POST'])
@login_required
def post():
form = PostForm()
form.category.choices = [(0, 'Almenn frétt')]
if form.validate_on_submit():
flash("Fréttin hefur verið vistuð!")
return redirect(url_for('admin.news'))
return render_template('admin/post.html', form=form)
| Add a route to admin/news/post to post a news story. Uses the PostForm for forms | Add a route to admin/news/post to post a news story. Uses the PostForm for forms
| Python | mit | finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is | from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
Add a route to admin/news/post to post a news story. Uses the PostForm for forms | from datetime import datetime
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm, PostForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
@admin.route('/news/post', methods=['GET', 'POST'])
@login_required
def post():
form = PostForm()
form.category.choices = [(0, 'Almenn frétt')]
if form.validate_on_submit():
flash("Fréttin hefur verið vistuð!")
return redirect(url_for('admin.news'))
return render_template('admin/post.html', form=form)
| <commit_before>from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
<commit_msg>Add a route to admin/news/post to post a news story. Uses the PostForm for forms<commit_after> | from datetime import datetime
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm, PostForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
@admin.route('/news/post', methods=['GET', 'POST'])
@login_required
def post():
form = PostForm()
form.category.choices = [(0, 'Almenn frétt')]
if form.validate_on_submit():
flash("Fréttin hefur verið vistuð!")
return redirect(url_for('admin.news'))
return render_template('admin/post.html', form=form)
| from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
Add a route to admin/news/post to post a news story. Uses the PostForm for formsfrom datetime import datetime
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm, PostForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
@admin.route('/news/post', methods=['GET', 'POST'])
@login_required
def post():
form = PostForm()
form.category.choices = [(0, 'Almenn frétt')]
if form.validate_on_submit():
flash("Fréttin hefur verið vistuð!")
return redirect(url_for('admin.news'))
return render_template('admin/post.html', form=form)
| <commit_before>from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
<commit_msg>Add a route to admin/news/post to post a news story. Uses the PostForm for forms<commit_after>from datetime import datetime
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm, PostForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
@admin.route('/news/post', methods=['GET', 'POST'])
@login_required
def post():
form = PostForm()
form.category.choices = [(0, 'Almenn frétt')]
if form.validate_on_submit():
flash("Fréttin hefur verið vistuð!")
return redirect(url_for('admin.news'))
return render_template('admin/post.html', form=form)
|
1c867959ea1a4fa61136be8d21e83bb13f26577f | app/interact_app.py | app/interact_app.py | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
| import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
# Assumes config['URL'] has a trailing slash
base_url = config['URL'][:-1]
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
| Remove trailing slash from base_url | Remove trailing slash from base_url
| Python | apache-2.0 | data-8/DS8-Interact,data-8/DS8-Interact,data-8/DS8-Interact | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
Remove trailing slash from base_url | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
# Assumes config['URL'] has a trailing slash
base_url = config['URL'][:-1]
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
| <commit_before>import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
<commit_msg>Remove trailing slash from base_url<commit_after> | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
# Assumes config['URL'] has a trailing slash
base_url = config['URL'][:-1]
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
| import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
Remove trailing slash from base_urlimport os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
# Assumes config['URL'] has a trailing slash
base_url = config['URL'][:-1]
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
| <commit_before>import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
<commit_msg>Remove trailing slash from base_url<commit_after>import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
# Assumes config['URL'] has a trailing slash
base_url = config['URL'][:-1]
socket_url = config['URL'] + r'socket/(\w+)'
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), 'templates'),
static_path=os.path.join(os.path.dirname(__file__), 'static'),
# Ensure static urls are prefixed with the base url too
static_url_prefix=config['URL'] + 'static',
)
super(InteractApp, self).__init__(handlers, **settings)
|
803677bbf54503621af04a59da3a84e79d59f52b | src/penn_chime/defaults.py | src/penn_chime/defaults.py | """Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: int,
known_infected: int,
relative_contact_rate: int,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
| """Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: float,
known_infected: int,
relative_contact_rate: float,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
| Change Constants type for doubling_time and relative_contact_rate from int to float | Change Constants type for doubling_time and relative_contact_rate from int to float
| Python | mit | CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime | """Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: int,
known_infected: int,
relative_contact_rate: int,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
Change Constants type for doubling_time and relative_contact_rate from int to float | """Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: float,
known_infected: int,
relative_contact_rate: float,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
| <commit_before>"""Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: int,
known_infected: int,
relative_contact_rate: int,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
<commit_msg>Change Constants type for doubling_time and relative_contact_rate from int to float<commit_after> | """Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: float,
known_infected: int,
relative_contact_rate: float,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
| """Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: int,
known_infected: int,
relative_contact_rate: int,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
Change Constants type for doubling_time and relative_contact_rate from int to float"""Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: float,
known_infected: int,
relative_contact_rate: float,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
| <commit_before>"""Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: int,
known_infected: int,
relative_contact_rate: int,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
<commit_msg>Change Constants type for doubling_time and relative_contact_rate from int to float<commit_after>"""Defaults."""
from .utils import RateLos
class Regions:
"""Arbitrary number of counties."""
def __init__(self, **kwargs):
population = 0
for key, value in kwargs.items():
setattr(self, key, value)
population += value
self.population = population
class Constants:
def __init__(
self,
*,
current_hospitalized: int,
doubling_time: float,
known_infected: int,
relative_contact_rate: float,
region: Regions,
hospitalized: RateLos,
icu: RateLos,
ventilated: RateLos,
as_date: bool = False,
market_share: float = 1.0,
max_y_axis: int = None,
n_days: int = 60,
recovery_days: int = 14,
):
self.region = region
self.current_hospitalized = current_hospitalized
self.known_infected = known_infected
self.doubling_time = doubling_time
self.relative_contact_rate = relative_contact_rate
self.hospitalized = hospitalized
self.icu = icu
self.ventilated = ventilated
self.as_date = as_date
self.market_share = market_share
self.max_y_axis = max_y_axis
self.n_days = n_days
self.recovery_days = recovery_days
def __repr__(self) -> str:
return f"Constants(population_default: {self.region.population}, known_infected: {self.known_infected})"
|
1b1715c4c44c162f650d6da79f5571ac2c0f994b | utils/get_collection_object_count.py | utils/get_collection_object_count.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| Update script to use new way of calling class. | Update script to use new way of calling class.
| Python | bsd-3-clause | barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
Update script to use new way of calling class. | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| <commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Update script to use new way of calling class.<commit_after> | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
Update script to use new way of calling class.#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
| <commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, 'barbarahui_test_bucket', argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Update script to use new way of calling class.<commit_after>#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
|
267c6591ef7ab3354b0965902585203fbfe04dee | pybot/http_client.py | pybot/http_client.py | import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
request = requests.get(path, headers=headers)
if request.status_code is not 200:
error = self.get_error_from_request(request)
completion(None, error)
else:
json_ = self.get_json(request.text)
completion(json_, None)
elif method == 'POST':
raise NotImplementedError
def get_error_from_request(self, request):
return {
'error': self.get_json(request.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
| import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
response = requests.get(path, headers=headers)
if response.status_code is not 200:
error = self.get_error_from_request(response)
completion(None, error)
else:
json_ = self.get_json(response.text)
completion(json_, None)
elif method == 'POST':
re
def get_error_from_response(self, response):
return {
'error': self.get_json(response.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
| Change wording to use response | Change wording to use response
| Python | mit | ben-cunningham/pybot,ben-cunningham/python-messenger-bot | import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
request = requests.get(path, headers=headers)
if request.status_code is not 200:
error = self.get_error_from_request(request)
completion(None, error)
else:
json_ = self.get_json(request.text)
completion(json_, None)
elif method == 'POST':
raise NotImplementedError
def get_error_from_request(self, request):
return {
'error': self.get_json(request.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
Change wording to use response | import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
response = requests.get(path, headers=headers)
if response.status_code is not 200:
error = self.get_error_from_request(response)
completion(None, error)
else:
json_ = self.get_json(response.text)
completion(json_, None)
elif method == 'POST':
re
def get_error_from_response(self, response):
return {
'error': self.get_json(response.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
| <commit_before>import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
request = requests.get(path, headers=headers)
if request.status_code is not 200:
error = self.get_error_from_request(request)
completion(None, error)
else:
json_ = self.get_json(request.text)
completion(json_, None)
elif method == 'POST':
raise NotImplementedError
def get_error_from_request(self, request):
return {
'error': self.get_json(request.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
<commit_msg>Change wording to use response<commit_after> | import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
response = requests.get(path, headers=headers)
if response.status_code is not 200:
error = self.get_error_from_request(response)
completion(None, error)
else:
json_ = self.get_json(response.text)
completion(json_, None)
elif method == 'POST':
re
def get_error_from_response(self, response):
return {
'error': self.get_json(response.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
| import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
request = requests.get(path, headers=headers)
if request.status_code is not 200:
error = self.get_error_from_request(request)
completion(None, error)
else:
json_ = self.get_json(request.text)
completion(json_, None)
elif method == 'POST':
raise NotImplementedError
def get_error_from_request(self, request):
return {
'error': self.get_json(request.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
Change wording to use responseimport requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
response = requests.get(path, headers=headers)
if response.status_code is not 200:
error = self.get_error_from_request(response)
completion(None, error)
else:
json_ = self.get_json(response.text)
completion(json_, None)
elif method == 'POST':
re
def get_error_from_response(self, response):
return {
'error': self.get_json(response.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
| <commit_before>import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
request = requests.get(path, headers=headers)
if request.status_code is not 200:
error = self.get_error_from_request(request)
completion(None, error)
else:
json_ = self.get_json(request.text)
completion(json_, None)
elif method == 'POST':
raise NotImplementedError
def get_error_from_request(self, request):
return {
'error': self.get_json(request.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
<commit_msg>Change wording to use response<commit_after>import requests, json
from resources.urls import FACEBOOK_MESSAGES_POST_URL
class HttpClient():
"""
Client which excutes the call to
facebook's messenger api
"""
def submit_request(self, path, method, payload, completion):
assert len(path) > 0
path = self.get_api_url(path)
headers = self.get_headers()
if method == 'GET':
response = requests.get(path, headers=headers)
if response.status_code is not 200:
error = self.get_error_from_request(response)
completion(None, error)
else:
json_ = self.get_json(response.text)
completion(json_, None)
elif method == 'POST':
re
def get_error_from_response(self, response):
return {
'error': self.get_json(response.text)
}
def get_json(self, string):
return json.loads(string)
def get_api_url(self, path):
return FACEBOOK_MESSAGES_POST_URL + path
def get_headers(self):
return {}
|
ed044a79347fcde11416c51a5c577fe2cc467050 | pyfr/readers/base.py | pyfr/readers/base.py | # -*- coding: utf-8 -*-
import uuid
from abc import ABCMeta, abstractmethod
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
| # -*- coding: utf-8 -*-
import re
import uuid
import itertools as it
from abc import ABCMeta, abstractmethod
import numpy as np
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def _optimize(self, mesh):
# Sort interior interfaces
for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Perform some simple optimizations on the mesh
self._optimize(mesh)
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
| Add some simple optimizations into the mesh reader classes. | Add some simple optimizations into the mesh reader classes.
This yields a ~1.5% performance improvement.
| Python | bsd-3-clause | tjcorona/PyFR,tjcorona/PyFR,BrianVermeire/PyFR,tjcorona/PyFR,Aerojspark/PyFR,iyer-arvind/PyFR | # -*- coding: utf-8 -*-
import uuid
from abc import ABCMeta, abstractmethod
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
Add some simple optimizations into the mesh reader classes.
This yields a ~1.5% performance improvement. | # -*- coding: utf-8 -*-
import re
import uuid
import itertools as it
from abc import ABCMeta, abstractmethod
import numpy as np
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def _optimize(self, mesh):
# Sort interior interfaces
for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Perform some simple optimizations on the mesh
self._optimize(mesh)
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
| <commit_before># -*- coding: utf-8 -*-
import uuid
from abc import ABCMeta, abstractmethod
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
<commit_msg>Add some simple optimizations into the mesh reader classes.
This yields a ~1.5% performance improvement.<commit_after> | # -*- coding: utf-8 -*-
import re
import uuid
import itertools as it
from abc import ABCMeta, abstractmethod
import numpy as np
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def _optimize(self, mesh):
# Sort interior interfaces
for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Perform some simple optimizations on the mesh
self._optimize(mesh)
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
| # -*- coding: utf-8 -*-
import uuid
from abc import ABCMeta, abstractmethod
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
Add some simple optimizations into the mesh reader classes.
This yields a ~1.5% performance improvement.# -*- coding: utf-8 -*-
import re
import uuid
import itertools as it
from abc import ABCMeta, abstractmethod
import numpy as np
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def _optimize(self, mesh):
# Sort interior interfaces
for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Perform some simple optimizations on the mesh
self._optimize(mesh)
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
| <commit_before># -*- coding: utf-8 -*-
import uuid
from abc import ABCMeta, abstractmethod
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
<commit_msg>Add some simple optimizations into the mesh reader classes.
This yields a ~1.5% performance improvement.<commit_after># -*- coding: utf-8 -*-
import re
import uuid
import itertools as it
from abc import ABCMeta, abstractmethod
import numpy as np
class BaseReader(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def _optimize(self, mesh):
# Sort interior interfaces
for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Perform some simple optimizations on the mesh
self._optimize(mesh)
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
|
52f38cd00db200d0520062c27f0d305827edb7d2 | eventkit_cloud/auth/models.py | eventkit_cloud/auth/models.py | from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
# https://stackoverflow.com/questions/12754024/onetoonefield-and-deleting
def delete(self, *args, **kwargs):
self.user.delete()
return super(self.__class__, self).delete(*args, **kwargs)
def __str__(self):
return '{0}'.format(self.commonname)
| from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
def __str__(self):
return '{0}'.format(self.commonname)
| Revert "adding delete hook so the attached User object is deleted properly when and OAuth object is deleted." | Revert "adding delete hook so the attached User object is deleted properly when and OAuth object is deleted."
This reverts commit 4c77c36f447d104f492e320ca684e9a737f2b803.
| Python | bsd-3-clause | venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud | from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
# https://stackoverflow.com/questions/12754024/onetoonefield-and-deleting
def delete(self, *args, **kwargs):
self.user.delete()
return super(self.__class__, self).delete(*args, **kwargs)
def __str__(self):
return '{0}'.format(self.commonname)
Revert "adding delete hook so the attached User object is deleted properly when and OAuth object is deleted."
This reverts commit 4c77c36f447d104f492e320ca684e9a737f2b803. | from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
def __str__(self):
return '{0}'.format(self.commonname)
| <commit_before>from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
# https://stackoverflow.com/questions/12754024/onetoonefield-and-deleting
def delete(self, *args, **kwargs):
self.user.delete()
return super(self.__class__, self).delete(*args, **kwargs)
def __str__(self):
return '{0}'.format(self.commonname)
<commit_msg>Revert "adding delete hook so the attached User object is deleted properly when and OAuth object is deleted."
This reverts commit 4c77c36f447d104f492e320ca684e9a737f2b803.<commit_after> | from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
def __str__(self):
return '{0}'.format(self.commonname)
| from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
# https://stackoverflow.com/questions/12754024/onetoonefield-and-deleting
def delete(self, *args, **kwargs):
self.user.delete()
return super(self.__class__, self).delete(*args, **kwargs)
def __str__(self):
return '{0}'.format(self.commonname)
Revert "adding delete hook so the attached User object is deleted properly when and OAuth object is deleted."
This reverts commit 4c77c36f447d104f492e320ca684e9a737f2b803.from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
def __str__(self):
return '{0}'.format(self.commonname)
| <commit_before>from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
# https://stackoverflow.com/questions/12754024/onetoonefield-and-deleting
def delete(self, *args, **kwargs):
self.user.delete()
return super(self.__class__, self).delete(*args, **kwargs)
def __str__(self):
return '{0}'.format(self.commonname)
<commit_msg>Revert "adding delete hook so the attached User object is deleted properly when and OAuth object is deleted."
This reverts commit 4c77c36f447d104f492e320ca684e9a737f2b803.<commit_after>from django.contrib.auth.models import User,Group
from django.db import models
from django.contrib.postgres.fields import JSONField
from ..core.models import TimeStampedModelMixin, UIDMixin
class OAuth(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=False)
identification = models.CharField(max_length=200, unique=True, blank=False)
commonname = models.CharField(max_length=100, blank=False)
user_info = JSONField(default={})
class Meta: # pragma: no cover
managed = True
db_table = 'auth_oauth'
def __str__(self):
return '{0}'.format(self.commonname)
|
e2c9b6e7b8eb3d55d35534fc68e9e2c692ad88a4 | openerp/addons/base/migrations/8.0.1.3/pre-migration.py | openerp/addons/base/migrations/8.0.1.3/pre-migration.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
| Remove a line in order to be pep8 compliant after merge | Remove a line in order to be pep8 compliant after merge
| Python | agpl-3.0 | sebalix/OpenUpgrade,damdam-s/OpenUpgrade,kirca/OpenUpgrade,pedrobaeza/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,csrocha/OpenUpgrade,bwrsandman/OpenUpgrade,0k/OpenUpgrade,pedrobaeza/OpenUpgrade,pedrobaeza/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,blaggacao/OpenUpgrade,grap/OpenUpgrade,mvaled/OpenUpgrade,Endika/OpenUpgrade,mvaled/OpenUpgrade,Endika/OpenUpgrade,bwrsandman/OpenUpgrade,hifly/OpenUpgrade,mvaled/OpenUpgrade,florentx/OpenUpgrade,sebalix/OpenUpgrade,sebalix/OpenUpgrade,florentx/OpenUpgrade,damdam-s/OpenUpgrade,damdam-s/OpenUpgrade,blaggacao/OpenUpgrade,bwrsandman/OpenUpgrade,mvaled/OpenUpgrade,florentx/OpenUpgrade,hifly/OpenUpgrade,sebalix/OpenUpgrade,florentx/OpenUpgrade,hifly/OpenUpgrade,csrocha/OpenUpgrade,damdam-s/OpenUpgrade,blaggacao/OpenUpgrade,pedrobaeza/OpenUpgrade,sebalix/OpenUpgrade,grap/OpenUpgrade,damdam-s/OpenUpgrade,Endika/OpenUpgrade,pedrobaeza/OpenUpgrade,kirca/OpenUpgrade,pedrobaeza/OpenUpgrade,blaggacao/OpenUpgrade,grap/OpenUpgrade,kirca/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,kirca/OpenUpgrade,grap/OpenUpgrade,florentx/OpenUpgrade,hifly/OpenUpgrade,csrocha/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,damdam-s/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,kirca/OpenUpgrade,csrocha/OpenUpgrade,blaggacao/OpenUpgrade,OpenUpgrade/OpenUpgrade,bwrsandman/OpenUpgrade,mvaled/OpenUpgrade,bwrsandman/OpenUpgrade,sebalix/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,mvaled/OpenUpgrade,blaggacao/OpenUpgrade,blaggacao/OpenUpgrade,0k/OpenUpgrade,OpenUpgrade/OpenUpgrade,grap/OpenUpgrade,florentx/OpenUpgrade,0k/OpenUpgrade,0k/OpenUpgrade,kirca/OpenUpgrade,sebalix/OpenUpgrade,pedrobaeza/OpenUpgrade,hifly/OpenUpgrade,damdam-s/OpenUpgrade,csrocha/OpenUpgrade,bwrsandman/OpenUpgrade,Endika/OpenUpgrade,0k/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hifly/OpenUpgrade,csrocha/OpenUpgrade,csrocha/OpenUpgrade,bwrsandman/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hifly/OpenUpgrade,kirca/Open
Upgrade,mvaled/OpenUpgrade,OpenUpgrade/OpenUpgrade,0k/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
Remove a line in order to be pep8 compliant after merge | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
<commit_msg>Remove a line in order to be pep8 compliant after merge<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
Remove a line in order to be pep8 compliant after merge# -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
<commit_msg>Remove a line in order to be pep8 compliant after merge<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# OpenUpgrade module for Odoo
# @copyright 2014-Today: Odoo Community Association
# @author: Sylvain LE GAL <https://twitter.com/legalsylvain>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.openupgrade import openupgrade
@openupgrade.migrate()
def migrate(cr, version):
openupgrade.check_values_selection_field(
cr, 'ir_act_report_xml', 'report_type',
['controller', 'pdf', 'qweb-html', 'qweb-pdf', 'sxw', 'webkit'])
openupgrade.check_values_selection_field(
cr, 'ir_ui_view', 'type', [
'calendar', 'diagram', 'form', 'gantt', 'graph', 'kanban',
'qweb', 'search', 'tree'])
|
20c52fdaf8f0eaefd9d857b37d89e7b429cc3013 | tests/functional/test_requests.py | tests/functional/test_requests.py | from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
| def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
| Update requests test for the new funcargs | Update requests test for the new funcargs
| Python | mit | natefoo/pip,mujiansu/pip,harrisonfeng/pip,xavfernandez/pip,James-Firth/pip,nthall/pip,mujiansu/pip,graingert/pip,prasaianooz/pip,techtonik/pip,zorosteven/pip,rbtcollins/pip,ianw/pip,prasaianooz/pip,zorosteven/pip,ianw/pip,cjerdonek/pip,tdsmith/pip,habnabit/pip,ChristopherHogan/pip,yati-sagade/pip,alex/pip,zvezdan/pip,fiber-space/pip,alquerci/pip,willingc/pip,atdaemon/pip,pradyunsg/pip,xavfernandez/pip,pjdelport/pip,erikrose/pip,jamezpolley/pip,RonnyPfannschmidt/pip,rbtcollins/pip,supriyantomaftuh/pip,James-Firth/pip,pfmoore/pip,squidsoup/pip,davidovich/pip,habnabit/pip,erikrose/pip,msabramo/pip,qwcode/pip,patricklaw/pip,haridsv/pip,wkeyword/pip,jmagnusson/pip,ncoghlan/pip,xavfernandez/pip,zvezdan/pip,jythontools/pip,zenlambda/pip,msabramo/pip,mattrobenolt/pip,dstufft/pip,jasonkying/pip,mindw/pip,Carreau/pip,qbdsoft/pip,Carreau/pip,habnabit/pip,alex/pip,jmagnusson/pip,pypa/pip,dstufft/pip,jmagnusson/pip,haridsv/pip,blarghmatey/pip,caosmo/pip,qbdsoft/pip,mindw/pip,pjdelport/pip,jasonkying/pip,ChristopherHogan/pip,cjerdonek/pip,mujiansu/pip,tdsmith/pip,h4ck3rm1k3/pip,rouge8/pip,Ivoz/pip,KarelJakubec/pip,blarghmatey/pip,Gabriel439/pip,chaoallsome/pip,wkeyword/pip,luzfcb/pip,atdaemon/pip,wkeyword/pip,willingc/pip,sigmavirus24/pip,RonnyPfannschmidt/pip,minrk/pip,atdaemon/pip,James-Firth/pip,benesch/pip,jamezpolley/pip,rouge8/pip,pjdelport/pip,ncoghlan/pip,fiber-space/pip,ChristopherHogan/pip,minrk/pip,natefoo/pip,qbdsoft/pip,sigmavirus24/pip,willingc/pip,dstufft/pip,haridsv/pip,yati-sagade/pip,alquerci/pip,davidovich/pip,blarghmatey/pip,squidsoup/pip,esc/pip,graingert/pip,zvezdan/pip,benesch/pip,esc/pip,supriyantomaftuh/pip,KarelJakubec/pip,sbidoul/pip,nthall/pip,pfmoore/pip,luzfcb/pip,Gabriel439/pip,jamezpolley/pip,jythontools/pip,chaoallsome/pip,fiber-space/pip,caosmo/pip,h4ck3rm1k3/pip,zenlambda/pip,sbidoul/pip,alex/pip,patricklaw/pip,techtonik/pip,jasonkying/pip,tdsmith/pip,jythontools/pip,squidsoup/pip,graingert/pip,yati-sagade/pip,rbtcollins/pip,esc
/pip,mattrobenolt/pip,nthall/pip,zorosteven/pip,pypa/pip,Ivoz/pip,supriyantomaftuh/pip,KarelJakubec/pip,harrisonfeng/pip,techtonik/pip,ncoghlan/pip,prasaianooz/pip,chaoallsome/pip,zenlambda/pip,pradyunsg/pip,qwcode/pip,Gabriel439/pip,harrisonfeng/pip,benesch/pip,luzfcb/pip,sigmavirus24/pip,RonnyPfannschmidt/pip,mindw/pip,h4ck3rm1k3/pip,davidovich/pip,erikrose/pip,caosmo/pip,natefoo/pip,rouge8/pip | from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
Update requests test for the new funcargs | def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
| <commit_before>from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
<commit_msg>Update requests test for the new funcargs<commit_after> | def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
| from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
Update requests test for the new funcargsdef test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
| <commit_before>from tests.lib import run_pip, reset_env
def test_timeout():
reset_env()
result = run_pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
<commit_msg>Update requests test for the new funcargs<commit_after>def test_timeout(script):
result = script.pip("--timeout", "0.01", "install", "-vvv", "INITools",
expect_error=True,
)
assert "Could not fetch URL https://pypi.python.org/simple/INITools/: timed out" in result.stdout
assert "Could not fetch URL https://pypi.python.org/simple/: timed out" in result.stdout
|
9297f2ffe0750ce4a40a35666ce9abb4bbfa487a | accounts/models.py | accounts/models.py | # coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
| # coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
| Add signal hook for Account creation on completed registration | Add signal hook for Account creation on completed registration
| Python | agpl-3.0 | christophmeissner/volunteer_planner,volunteer-planner/volunteer_planner,alper/volunteer_planner,pitpalme/volunteer_planner,pitpalme/volunteer_planner,pitpalme/volunteer_planner,christophmeissner/volunteer_planner,flindenberg/volunteer_planner,juliabiro/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner,MRigal/volunteer_planner,flindenberg/volunteer_planner,alper/volunteer_planner,christophmeissner/volunteer_planner,christophmeissner/volunteer_planner,volunteer-planner/volunteer_planner,juliabiro/volunteer_planner,klinger/volunteer_planner,flindenberg/volunteer_planner,coders4help/volunteer_planner,pitpalme/volunteer_planner,coders4help/volunteer_planner,coders4help/volunteer_planner,klinger/volunteer_planner,MRigal/volunteer_planner,MRigal/volunteer_planner,alper/volunteer_planner,coders4help/volunteer_planner,juliabiro/volunteer_planner,klinger/volunteer_planner,volunteer-planner/volunteer_planner | # coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
Add signal hook for Account creation on completed registration | # coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
| <commit_before># coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
<commit_msg>Add signal hook for Account creation on completed registration<commit_after> | # coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
| # coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
Add signal hook for Account creation on completed registration# coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
| <commit_before># coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
<commit_msg>Add signal hook for Account creation on completed registration<commit_after># coding: utf-8
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from registration.signals import user_activated
from django.dispatch import receiver
class UserAccount(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='account')
class Meta:
verbose_name = _('user account')
verbose_name_plural = _('user accounts')
@receiver(user_activated)
def registration_completed(sender, user, request, **kwargs):
account, created = UserAccount.objects.get_or_create(user=user)
print account, created
|
fb5e742e0b820af4a14052d9bd12053dcdc36d52 | examples/recurrent-phase.py | examples/recurrent-phase.py | #!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
lmj.cli.enable_default_logging()
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
t = np.linspace(0, 4 * np.pi, 256)
train = np.array([np.sin(t + i) for i in range(64, 256)])
dev = np.array([np.sin(t + i) for i in range(64)])
return train, dev
m = Main(layers=(1, 3, 1), batch_size=1)
m.train()
| #!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
from matplotlib import pyplot as plt
lmj.cli.enable_default_logging()
T = 256
S = np.linspace(0, 4 * np.pi, T)
def sines(i=0):
return (0.7 * np.sin(S) + 0.3 * np.sin(i * S / 2)).reshape((T, 1))
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
train = np.array([sines(i) for i in range(64, 256)])
dev = np.array([sines(i) for i in range(64)])
return train, dev
m = Main(layers=(1, 10, 1), batch_size=1, num_updates=10, learning_rate=0.01)
m.train()
source = sines(13)
match = m.net(source)
# plot the input, output, and error of the network.
t = np.arange(T)
ax = plt.subplot(111)
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
for loc, spine in ax.spines.iteritems():
if loc in 'left bottom':
spine.set_position(('outward', 6))
elif loc in 'right top':
spine.set_color('none')
ax.plot(t, source, '.-', c='#111111', label='Target')
ax.plot(t, match, '.-', c='#1f77b4', label='Output')
ax.plot(t, abs(source - match), '.-', c='#d62728', label='Error')
ax.set_xlim(0, T)
plt.legend()
plt.show()
| Fix up recurrent phase test ! Add an output plot to show what the network does with a particular input. | Fix up recurrent phase test ! Add an output plot to show what the network does with a particular input.
| Python | mit | chrinide/theanets,devdoer/theanets,lmjohns3/theanets | #!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
lmj.cli.enable_default_logging()
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
t = np.linspace(0, 4 * np.pi, 256)
train = np.array([np.sin(t + i) for i in range(64, 256)])
dev = np.array([np.sin(t + i) for i in range(64)])
return train, dev
m = Main(layers=(1, 3, 1), batch_size=1)
m.train()
Fix up recurrent phase test ! Add an output plot to show what the network does with a particular input. | #!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
from matplotlib import pyplot as plt
lmj.cli.enable_default_logging()
T = 256
S = np.linspace(0, 4 * np.pi, T)
def sines(i=0):
return (0.7 * np.sin(S) + 0.3 * np.sin(i * S / 2)).reshape((T, 1))
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
train = np.array([sines(i) for i in range(64, 256)])
dev = np.array([sines(i) for i in range(64)])
return train, dev
m = Main(layers=(1, 10, 1), batch_size=1, num_updates=10, learning_rate=0.01)
m.train()
source = sines(13)
match = m.net(source)
# plot the input, output, and error of the network.
t = np.arange(T)
ax = plt.subplot(111)
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
for loc, spine in ax.spines.iteritems():
if loc in 'left bottom':
spine.set_position(('outward', 6))
elif loc in 'right top':
spine.set_color('none')
ax.plot(t, source, '.-', c='#111111', label='Target')
ax.plot(t, match, '.-', c='#1f77b4', label='Output')
ax.plot(t, abs(source - match), '.-', c='#d62728', label='Error')
ax.set_xlim(0, T)
plt.legend()
plt.show()
| <commit_before>#!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
lmj.cli.enable_default_logging()
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
t = np.linspace(0, 4 * np.pi, 256)
train = np.array([np.sin(t + i) for i in range(64, 256)])
dev = np.array([np.sin(t + i) for i in range(64)])
return train, dev
m = Main(layers=(1, 3, 1), batch_size=1)
m.train()
<commit_msg>Fix up recurrent phase test ! Add an output plot to show what the network does with a particular input.<commit_after> | #!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
from matplotlib import pyplot as plt
lmj.cli.enable_default_logging()
T = 256
S = np.linspace(0, 4 * np.pi, T)
def sines(i=0):
return (0.7 * np.sin(S) + 0.3 * np.sin(i * S / 2)).reshape((T, 1))
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
train = np.array([sines(i) for i in range(64, 256)])
dev = np.array([sines(i) for i in range(64)])
return train, dev
m = Main(layers=(1, 10, 1), batch_size=1, num_updates=10, learning_rate=0.01)
m.train()
source = sines(13)
match = m.net(source)
# plot the input, output, and error of the network.
t = np.arange(T)
ax = plt.subplot(111)
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
for loc, spine in ax.spines.iteritems():
if loc in 'left bottom':
spine.set_position(('outward', 6))
elif loc in 'right top':
spine.set_color('none')
ax.plot(t, source, '.-', c='#111111', label='Target')
ax.plot(t, match, '.-', c='#1f77b4', label='Output')
ax.plot(t, abs(source - match), '.-', c='#d62728', label='Error')
ax.set_xlim(0, T)
plt.legend()
plt.show()
| #!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
lmj.cli.enable_default_logging()
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
t = np.linspace(0, 4 * np.pi, 256)
train = np.array([np.sin(t + i) for i in range(64, 256)])
dev = np.array([np.sin(t + i) for i in range(64)])
return train, dev
m = Main(layers=(1, 3, 1), batch_size=1)
m.train()
Fix up recurrent phase test ! Add an output plot to show what the network does with a particular input.#!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
from matplotlib import pyplot as plt
lmj.cli.enable_default_logging()
T = 256
S = np.linspace(0, 4 * np.pi, T)
def sines(i=0):
return (0.7 * np.sin(S) + 0.3 * np.sin(i * S / 2)).reshape((T, 1))
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
train = np.array([sines(i) for i in range(64, 256)])
dev = np.array([sines(i) for i in range(64)])
return train, dev
m = Main(layers=(1, 10, 1), batch_size=1, num_updates=10, learning_rate=0.01)
m.train()
source = sines(13)
match = m.net(source)
# plot the input, output, and error of the network.
t = np.arange(T)
ax = plt.subplot(111)
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
for loc, spine in ax.spines.iteritems():
if loc in 'left bottom':
spine.set_position(('outward', 6))
elif loc in 'right top':
spine.set_color('none')
ax.plot(t, source, '.-', c='#111111', label='Target')
ax.plot(t, match, '.-', c='#1f77b4', label='Output')
ax.plot(t, abs(source - match), '.-', c='#d62728', label='Error')
ax.set_xlim(0, T)
plt.legend()
plt.show()
| <commit_before>#!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
lmj.cli.enable_default_logging()
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
t = np.linspace(0, 4 * np.pi, 256)
train = np.array([np.sin(t + i) for i in range(64, 256)])
dev = np.array([np.sin(t + i) for i in range(64)])
return train, dev
m = Main(layers=(1, 3, 1), batch_size=1)
m.train()
<commit_msg>Fix up recurrent phase test ! Add an output plot to show what the network does with a particular input.<commit_after>#!/usr/bin/env python
import logging
import numpy as np
import lmj.cli
import lmj.nn
from matplotlib import pyplot as plt
lmj.cli.enable_default_logging()
T = 256
S = np.linspace(0, 4 * np.pi, T)
def sines(i=0):
return (0.7 * np.sin(S) + 0.3 * np.sin(i * S / 2)).reshape((T, 1))
class Main(lmj.nn.Main):
def get_network(self):
return lmj.nn.recurrent.Autoencoder
def get_datasets(self):
train = np.array([sines(i) for i in range(64, 256)])
dev = np.array([sines(i) for i in range(64)])
return train, dev
m = Main(layers=(1, 10, 1), batch_size=1, num_updates=10, learning_rate=0.01)
m.train()
source = sines(13)
match = m.net(source)
# plot the input, output, and error of the network.
t = np.arange(T)
ax = plt.subplot(111)
ax.xaxis.tick_bottom()
ax.yaxis.tick_left()
for loc, spine in ax.spines.iteritems():
if loc in 'left bottom':
spine.set_position(('outward', 6))
elif loc in 'right top':
spine.set_color('none')
ax.plot(t, source, '.-', c='#111111', label='Target')
ax.plot(t, match, '.-', c='#1f77b4', label='Output')
ax.plot(t, abs(source - match), '.-', c='#d62728', label='Error')
ax.set_xlim(0, T)
plt.legend()
plt.show()
|
f3ad7f31784ea35da8655efa97ad3dd102e6dddb | django_bundles/management/commands/create_bundle_manifests.py | django_bundles/management/commands/create_bundle_manifests.py | import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
manifest.write("\n".join(f.file_path for f in bundle.files))
| import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
from django_bundles.processors import processor_pipeline
from django_bundles.utils.files import FileChunkGenerator
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
for bundle_file in bundle.files:
if bundle_file.processors:
# The file has a preprocessor. This means in its current state it may not be a valid file
# and thus not suitable for inclusion in the manifest. Do any appropriate preprocessing and
# write out an appropriate version
output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')))
tmp_output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'temp', bundle.bundle_type)
with open(tmp_output_file_name, 'wb') as output_file:
for chunk in output_pipeline:
output_file.write(chunk)
output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'manifest', bundle.bundle_type)
os.rename(tmp_output_file_name, output_file_name)
manifest.write(output_file_name + "\n")
else:
manifest.write(bundle_file.file_path + "\n")
| Create processed versions of files for manifests | Create processed versions of files for manifests
This means if we have resources which are processed by django templates
then the processing will be done first and thus will yield for example a
valid javascript file
| Python | mit | sdcooke/django_bundles | import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
manifest.write("\n".join(f.file_path for f in bundle.files))
Create processed versions of files for manifests
This means if we have resources which are processed by django templates
then the processing will be done first and thus will yield for example a
valid javascript file | import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
from django_bundles.processors import processor_pipeline
from django_bundles.utils.files import FileChunkGenerator
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
for bundle_file in bundle.files:
if bundle_file.processors:
# The file has a preprocessor. This means in its current state it may not be a valid file
# and thus not suitable for inclusion in the manifest. Do any appropriate preprocessing and
# write out an appropriate version
output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')))
tmp_output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'temp', bundle.bundle_type)
with open(tmp_output_file_name, 'wb') as output_file:
for chunk in output_pipeline:
output_file.write(chunk)
output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'manifest', bundle.bundle_type)
os.rename(tmp_output_file_name, output_file_name)
manifest.write(output_file_name + "\n")
else:
manifest.write(bundle_file.file_path + "\n")
| <commit_before>import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
manifest.write("\n".join(f.file_path for f in bundle.files))
<commit_msg>Create processed versions of files for manifests
This means if we have resources which are processed by django templates
then the processing will be done first and thus will yield for example a
valid javascript file<commit_after> | import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
from django_bundles.processors import processor_pipeline
from django_bundles.utils.files import FileChunkGenerator
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
for bundle_file in bundle.files:
if bundle_file.processors:
# The file has a preprocessor. This means in its current state it may not be a valid file
# and thus not suitable for inclusion in the manifest. Do any appropriate preprocessing and
# write out an appropriate version
output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')))
tmp_output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'temp', bundle.bundle_type)
with open(tmp_output_file_name, 'wb') as output_file:
for chunk in output_pipeline:
output_file.write(chunk)
output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'manifest', bundle.bundle_type)
os.rename(tmp_output_file_name, output_file_name)
manifest.write(output_file_name + "\n")
else:
manifest.write(bundle_file.file_path + "\n")
| import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
manifest.write("\n".join(f.file_path for f in bundle.files))
Create processed versions of files for manifests
This means if we have resources which are processed by django templates
then the processing will be done first and thus will yield for example a
valid javascript fileimport os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
from django_bundles.processors import processor_pipeline
from django_bundles.utils.files import FileChunkGenerator
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
for bundle_file in bundle.files:
if bundle_file.processors:
# The file has a preprocessor. This means in its current state it may not be a valid file
# and thus not suitable for inclusion in the manifest. Do any appropriate preprocessing and
# write out an appropriate version
output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')))
tmp_output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'temp', bundle.bundle_type)
with open(tmp_output_file_name, 'wb') as output_file:
for chunk in output_pipeline:
output_file.write(chunk)
output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'manifest', bundle.bundle_type)
os.rename(tmp_output_file_name, output_file_name)
manifest.write(output_file_name + "\n")
else:
manifest.write(bundle_file.file_path + "\n")
| <commit_before>import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
manifest.write("\n".join(f.file_path for f in bundle.files))
<commit_msg>Create processed versions of files for manifests
This means if we have resources which are processed by django templates
then the processing will be done first and thus will yield for example a
valid javascript file<commit_after>import os
from django.core.management.base import BaseCommand
from django_bundles.core import get_bundles
from django_bundles.processors import processor_pipeline
from django_bundles.utils.files import FileChunkGenerator
class Command(BaseCommand):
args = "target_directory"
help = "Writes out files containing the list of input files for each bundle"
requires_model_validation = False
def handle(self, target_directory, *args, **options):
try:
os.mkdir(target_directory)
except OSError:
pass
for bundle in get_bundles():
manifest_filename = os.path.join(target_directory, bundle.name) + '.manifest'
with open(manifest_filename, 'w') as manifest:
for bundle_file in bundle.files:
if bundle_file.processors:
# The file has a preprocessor. This means in its current state it may not be a valid file
# and thus not suitable for inclusion in the manifest. Do any appropriate preprocessing and
# write out an appropriate version
output_pipeline = processor_pipeline(bundle_file.processors, FileChunkGenerator(open(bundle_file.file_path, 'rb')))
tmp_output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'temp', bundle.bundle_type)
with open(tmp_output_file_name, 'wb') as output_file:
for chunk in output_pipeline:
output_file.write(chunk)
output_file_name = '%s.%s.%s' % (bundle_file.file_path, 'manifest', bundle.bundle_type)
os.rename(tmp_output_file_name, output_file_name)
manifest.write(output_file_name + "\n")
else:
manifest.write(bundle_file.file_path + "\n")
|
3baa7fe8f3ff7f7840f22647754783967657fe16 | skimage/viewer/utils/dialogs.py | skimage/viewer/utils/dialogs.py | import os
from ..qt import QtGui
def open_file_dialog(default_format='png'):
"""Return user-selected file path."""
filename = str(QtGui.QFileDialog.getOpenFileName())
if len(filename) == 0:
return None
return filename
def save_file_dialog(default_format='png'):
"""Return user-selected file path."""
filename = QtGui.QFileDialog.getSaveFileName()
# Handle discrepancy between PyQt4 and PySide APIs.
if isinstance(filename, tuple):
filename = filename[0]
filename = str(filename)
if len(filename) == 0:
return None
#TODO: io plugins should assign default image formats
basename, ext = os.path.splitext(filename)
if not ext:
filename = '%s.%s' % (filename, default_format)
return filename
| import os
from ..qt import QtGui
__all__ = ['open_file_dialog', 'save_file_dialog']
def _format_filename(filename):
if isinstance(filename, tuple):
# Handle discrepancy between PyQt4 and PySide APIs.
filename = filename[0]
if len(filename) == 0:
return None
return str(filename)
def open_file_dialog():
"""Return user-selected file path."""
filename = QtGui.QFileDialog.getOpenFileName()
filename = _format_filename(filename)
return filename
def save_file_dialog(default_format='png'):
"""Return user-selected file path."""
filename = QtGui.QFileDialog.getSaveFileName()
filename = _format_filename(filename)
if filename is None:
return None
#TODO: io plugins should assign default image formats
basename, ext = os.path.splitext(filename)
if not ext:
filename = '%s.%s' % (filename, default_format)
return filename
| Fix file open dialog for PySide | Fix file open dialog for PySide
| Python | bsd-3-clause | juliusbierk/scikit-image,youprofit/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,newville/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,almarklein/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,chriscrosscutler/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,SamHames/scikit-image,blink1073/scikit-image,almarklein/scikit-image,newville/scikit-image,michaelpacer/scikit-image,robintw/scikit-image,pratapvardhan/scikit-image,juliusbierk/scikit-image,WarrenWeckesser/scikits-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,chintak/scikit-image,blink1073/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,chintak/scikit-image,emon10005/scikit-image,chintak/scikit-image,paalge/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,keflavich/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,WarrenWeckesser/scikits-image,rjeli/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,ClinicalGraphics/scikit-image,michaelaye/scikit-image,paalge/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,SamHames/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,emon10005/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,SamHames/scikit-image,robintw/scikit-image,ofgulban/scikit-image,keflavich/scikit-image,ajaybhat/scikit-image,Britefury/scikit-image,jwiggins/scikit-image | import os
from ..qt import QtGui
def open_file_dialog(default_format='png'):
"""Return user-selected file path."""
filename = str(QtGui.QFileDialog.getOpenFileName())
if len(filename) == 0:
return None
return filename
def save_file_dialog(default_format='png'):
"""Return user-selected file path."""
filename = QtGui.QFileDialog.getSaveFileName()
# Handle discrepancy between PyQt4 and PySide APIs.
if isinstance(filename, tuple):
filename = filename[0]
filename = str(filename)
if len(filename) == 0:
return None
#TODO: io plugins should assign default image formats
basename, ext = os.path.splitext(filename)
if not ext:
filename = '%s.%s' % (filename, default_format)
return filename
Fix file open dialog for PySide | import os
from ..qt import QtGui
__all__ = ['open_file_dialog', 'save_file_dialog']
def _format_filename(filename):
if isinstance(filename, tuple):
# Handle discrepancy between PyQt4 and PySide APIs.
filename = filename[0]
if len(filename) == 0:
return None
return str(filename)
def open_file_dialog():
"""Return user-selected file path."""
filename = QtGui.QFileDialog.getOpenFileName()
filename = _format_filename(filename)
return filename
def save_file_dialog(default_format='png'):
    """Pop up a "Save File" dialog and return the chosen path.

    When the typed name has no extension, ``default_format`` is appended
    as the file extension.  Returns ``None`` if the dialog is cancelled.
    """
    path = _format_filename(QtGui.QFileDialog.getSaveFileName())
    if path is None:
        return None
    #TODO: io plugins should assign default image formats
    _, extension = os.path.splitext(path)
    if not extension:
        path = '%s.%s' % (path, default_format)
    return path
| <commit_before>import os
from ..qt import QtGui


def open_file_dialog(default_format='png'):
    """Return the user-selected file path, or None if the dialog is
    cancelled.

    NOTE(review): ``default_format`` is unused here -- presumably kept
    to mirror ``save_file_dialog``'s signature; confirm before removing.
    """
    # NOTE(review): under PySide, getOpenFileName() returns a
    # (filename, selected_filter) tuple, so str() here would yield a
    # mangled path such as "('/tmp/x.png', ...)".  save_file_dialog
    # below unwraps the tuple first -- this call probably should too.
    filename = str(QtGui.QFileDialog.getOpenFileName())
    if len(filename) == 0:
        return None
    return filename


def save_file_dialog(default_format='png'):
    """Return the user-selected save path, or None if the dialog is
    cancelled.  A missing extension is filled in with ``default_format``.
    """
    filename = QtGui.QFileDialog.getSaveFileName()
    # Handle discrepancy between PyQt4 and PySide APIs.
    if isinstance(filename, tuple):
        filename = filename[0]
    filename = str(filename)  # coerce dialog result to a plain str
    if len(filename) == 0:
        # Empty string means the user cancelled the dialog.
        return None
    #TODO: io plugins should assign default image formats
    basename, ext = os.path.splitext(filename)
    if not ext:
        filename = '%s.%s' % (filename, default_format)
    return filename
<commit_msg>Fix file open dialog for PySide<commit_after> | import os
from ..qt import QtGui

__all__ = ['open_file_dialog', 'save_file_dialog']


def _format_filename(filename):
    # Normalize a Qt file-dialog result: unwrap PySide's
    # (filename, selected_filter) tuple, map an empty selection
    # (cancelled dialog) to None, and coerce the rest to a plain str.
    if isinstance(filename, tuple):
        # Handle discrepancy between PyQt4 and PySide APIs.
        filename = filename[0]
    if len(filename) == 0:
        return None
    return str(filename)


def open_file_dialog():
    """Return user-selected file path, or None if cancelled."""
    filename = QtGui.QFileDialog.getOpenFileName()
    filename = _format_filename(filename)
    return filename


def save_file_dialog(default_format='png'):
    """Return user-selected save path, or None if cancelled.

    ``default_format`` supplies the extension when the chosen name has
    none.
    """
    filename = QtGui.QFileDialog.getSaveFileName()
    filename = _format_filename(filename)
    if filename is None:
        return None
    #TODO: io plugins should assign default image formats
    basename, ext = os.path.splitext(filename)
    if not ext:
        filename = '%s.%s' % (filename, default_format)
    return filename
| import os
from ..qt import QtGui


def open_file_dialog(default_format='png'):
    """Return the user-selected file path, or None if the dialog is
    cancelled.

    NOTE(review): ``default_format`` is unused here -- presumably kept
    to mirror ``save_file_dialog``'s signature; confirm before removing.
    """
    # NOTE(review): under PySide, getOpenFileName() returns a
    # (filename, selected_filter) tuple, so str() here would yield a
    # mangled path such as "('/tmp/x.png', ...)".  save_file_dialog
    # below unwraps the tuple first -- this call probably should too.
    filename = str(QtGui.QFileDialog.getOpenFileName())
    if len(filename) == 0:
        return None
    return filename


def save_file_dialog(default_format='png'):
    """Return the user-selected save path, or None if the dialog is
    cancelled.  A missing extension is filled in with ``default_format``.
    """
    filename = QtGui.QFileDialog.getSaveFileName()
    # Handle discrepancy between PyQt4 and PySide APIs.
    if isinstance(filename, tuple):
        filename = filename[0]
    filename = str(filename)  # coerce dialog result to a plain str
    if len(filename) == 0:
        # Empty string means the user cancelled the dialog.
        return None
    #TODO: io plugins should assign default image formats
    basename, ext = os.path.splitext(filename)
    if not ext:
        filename = '%s.%s' % (filename, default_format)
    return filename
Fix file open dialog for PySideimport os
from ..qt import QtGui

__all__ = ['open_file_dialog', 'save_file_dialog']


def _format_filename(filename):
    # Normalize a Qt file-dialog result: unwrap PySide's
    # (filename, selected_filter) tuple, map an empty selection
    # (cancelled dialog) to None, and coerce the rest to a plain str.
    if isinstance(filename, tuple):
        # Handle discrepancy between PyQt4 and PySide APIs.
        filename = filename[0]
    if len(filename) == 0:
        return None
    return str(filename)


def open_file_dialog():
    """Return user-selected file path, or None if cancelled."""
    filename = QtGui.QFileDialog.getOpenFileName()
    filename = _format_filename(filename)
    return filename


def save_file_dialog(default_format='png'):
    """Return user-selected save path, or None if cancelled.

    ``default_format`` supplies the extension when the chosen name has
    none.
    """
    filename = QtGui.QFileDialog.getSaveFileName()
    filename = _format_filename(filename)
    if filename is None:
        return None
    #TODO: io plugins should assign default image formats
    basename, ext = os.path.splitext(filename)
    if not ext:
        filename = '%s.%s' % (filename, default_format)
    return filename
| <commit_before>import os
from ..qt import QtGui


def open_file_dialog(default_format='png'):
    """Return the user-selected file path, or None if the dialog is
    cancelled.

    NOTE(review): ``default_format`` is unused here -- presumably kept
    to mirror ``save_file_dialog``'s signature; confirm before removing.
    """
    # NOTE(review): under PySide, getOpenFileName() returns a
    # (filename, selected_filter) tuple, so str() here would yield a
    # mangled path such as "('/tmp/x.png', ...)".  save_file_dialog
    # below unwraps the tuple first -- this call probably should too.
    filename = str(QtGui.QFileDialog.getOpenFileName())
    if len(filename) == 0:
        return None
    return filename


def save_file_dialog(default_format='png'):
    """Return the user-selected save path, or None if the dialog is
    cancelled.  A missing extension is filled in with ``default_format``.
    """
    filename = QtGui.QFileDialog.getSaveFileName()
    # Handle discrepancy between PyQt4 and PySide APIs.
    if isinstance(filename, tuple):
        filename = filename[0]
    filename = str(filename)  # coerce dialog result to a plain str
    if len(filename) == 0:
        # Empty string means the user cancelled the dialog.
        return None
    #TODO: io plugins should assign default image formats
    basename, ext = os.path.splitext(filename)
    if not ext:
        filename = '%s.%s' % (filename, default_format)
    return filename
<commit_msg>Fix file open dialog for PySide<commit_after>import os
from ..qt import QtGui

__all__ = ['open_file_dialog', 'save_file_dialog']


def _format_filename(filename):
    # Normalize a Qt file-dialog result: unwrap PySide's
    # (filename, selected_filter) tuple, map an empty selection
    # (cancelled dialog) to None, and coerce the rest to a plain str.
    if isinstance(filename, tuple):
        # Handle discrepancy between PyQt4 and PySide APIs.
        filename = filename[0]
    if len(filename) == 0:
        return None
    return str(filename)


def open_file_dialog():
    """Return user-selected file path, or None if cancelled."""
    filename = QtGui.QFileDialog.getOpenFileName()
    filename = _format_filename(filename)
    return filename


def save_file_dialog(default_format='png'):
    """Return user-selected save path, or None if cancelled.

    ``default_format`` supplies the extension when the chosen name has
    none.
    """
    filename = QtGui.QFileDialog.getSaveFileName()
    filename = _format_filename(filename)
    if filename is None:
        return None
    #TODO: io plugins should assign default image formats
    basename, ext = os.path.splitext(filename)
    if not ext:
        filename = '%s.%s' % (filename, default_format)
    return filename
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.