Dataset schema (column statistics: min–max string length, or number of distinct classes):

| column | stats |
|---|---|
| commit | string, length 40–40 |
| old_file | string, length 4–118 |
| new_file | string, length 4–118 |
| old_contents | string, length 0–2.94k |
| new_contents | string, length 1–4.43k |
| subject | string, length 15–444 |
| message | string, length 16–3.45k |
| lang | 1 class (Python) |
| license | 13 classes |
| repos | string, length 5–43.2k |
| prompt | string, length 17–4.58k |
| response | string, length 1–4.43k |
| prompt_tagged | string, length 58–4.62k |
| response_tagged | string, length 1–4.43k |
| text | string, length 132–7.29k |
| text_tagged | string, length 173–7.33k |

The last six columns are derived from the others: prompt is old_contents with the subject appended, response and response_tagged repeat new_contents, prompt_tagged wraps the same pieces in <commit_before>/<commit_msg>/<commit_after> markers, and text / text_tagged concatenate the prompt and response variants. The sample records below therefore show only the primary columns.
commit: b999c9a9bbd8052771ade80bad14f8aef2bef58b
old_file / new_file: conda/python-scons/system.py
subject / message: Test add site_scons as SCons module
lang: Python | license: apache-2.0

old_contents:

```python
import platform

from SCons.Script import AddOption, GetOption

SYSTEMS = dict(Linux = "linux",
               Darwin = "osx",
               Windows = "windows")

system = str(platform.system())
if not system in SYSTEMS:
    system = "unknown"

AddOption('--system',
          dest = 'system',
          type = 'string',
          nargs = 1,
          action = 'store',
          help = 'system',
          choices = SYSTEMS.values(),
          default = system)


def generate(env, **kwargs):
    env['SYSTEM'] = GetOption('system')
    if env['SYSTEM'] == 'unknown':
        raise ValueError('Unknown system')


def exists(env):
    return 1
```

new_contents (the only change: `type = 'string'` becomes `type = 'choice'`):

```python
import platform

from SCons.Script import AddOption, GetOption

SYSTEMS = dict(Linux = "linux",
               Darwin = "osx",
               Windows = "windows")

system = str(platform.system())
if not system in SYSTEMS:
    system = "unknown"

AddOption('--system',
          dest = 'system',
          type = 'choice',
          nargs = 1,
          action = 'store',
          help = 'system',
          choices = SYSTEMS.values(),
          default = system)


def generate(env, **kwargs):
    env['SYSTEM'] = GetOption('system')
    if env['SYSTEM'] == 'unknown':
        raise ValueError('Unknown system')


def exists(env):
    return 1
```

repos:
StatisKit/StatisKit,StatisKit/StatisKit
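The type change matters because AddOption hands its arguments to an optparse-style parser, and optparse only validates a value against `choices` when the option's type is `'choice'`; with `type = 'string'`, an invalid `--system` value is accepted silently. A minimal sketch of that behavior using plain optparse, outside SCons:

```python
# With type='choice', optparse rejects values outside the choices list;
# with type='string' the same bad value would pass through unchecked.
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--system', dest='system', type='choice',
                  choices=['linux', 'osx', 'windows'], default='linux')

opts, _ = parser.parse_args(['--system', 'osx'])
print(opts.system)                            # osx

try:
    parser.parse_args(['--system', 'beos'])   # not in choices
except SystemExit:
    print('invalid choice rejected')          # optparse errors out with exit code 2
```

One portability note: on Python 3, optparse requires `choices` to be a list or tuple, so the `SYSTEMS.values()` view in this record would need to be wrapped in `list(...)`.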
commit: 2d05ede4db8bf80834e04ffb5f9d0ec11982851d
old_file / new_file: normandy/recipes/validators.py
subject / message: Check for empty strings in required validator
lang: Python | license: mpl-2.0

old_contents:

```python
import json

import jsonschema
from django.core.exceptions import ValidationError


# Add path to required validator so we can get property name
def _required(validator, required, instance, schema):
    '''Validate 'required' properties.'''
    if not validator.is_type(instance, 'object'):
        return

    for index, requirement in enumerate(required):
        if requirement not in instance:
            error = jsonschema.ValidationError(
                'This field may not be blank.',
                path=[requirement]
            )
            yield error


# Construct validator as extension of Json Schema Draft 4.
Validator = jsonschema.validators.extend(
    validator=jsonschema.validators.Draft4Validator,
    validators={
        'required': _required
    }
)


def validate_json(value):
    """
    Validate that a given value can be successfully parsed as JSON.
    """
    try:
        json.loads(value)
    except json.JSONDecodeError as err:
        raise ValidationError('%s is not valid JSON: %s', params=(value, err.msg))
```

new_contents (the required check now also rejects empty strings):

```python
import json

import jsonschema
from django.core.exceptions import ValidationError


# Add path to required validator so we can get property name
def _required(validator, required, instance, schema):
    '''Validate 'required' properties.'''
    if not validator.is_type(instance, 'object'):
        return

    for index, requirement in enumerate(required):
        if requirement not in instance or instance[requirement] == '':
            error = jsonschema.ValidationError(
                'This field may not be blank.',
                path=[requirement]
            )
            yield error


# Construct validator as extension of Json Schema Draft 4.
Validator = jsonschema.validators.extend(
    validator=jsonschema.validators.Draft4Validator,
    validators={
        'required': _required
    }
)


def validate_json(value):
    """
    Validate that a given value can be successfully parsed as JSON.
    """
    try:
        json.loads(value)
    except json.JSONDecodeError as err:
        raise ValidationError('%s is not valid JSON: %s', params=(value, err.msg))
```

repos:
Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy
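The one-line change tightens the semantics: Draft 4's stock `required` keyword only checks key presence, so an instance like `{'name': ''}` validates even though the field is blank. A self-contained sketch of the extended validator at work (the schema and instance here are invented for illustration):

```python
import jsonschema

# Same override as the record above: treat an empty string as missing.
def _required(validator, required, instance, schema):
    if not validator.is_type(instance, 'object'):
        return
    for requirement in required:
        if requirement not in instance or instance[requirement] == '':
            yield jsonschema.ValidationError(
                'This field may not be blank.', path=[requirement])

Validator = jsonschema.validators.extend(
    validator=jsonschema.validators.Draft4Validator,
    validators={'required': _required})

schema = {'type': 'object', 'required': ['name']}

print(list(Validator(schema).iter_errors({'name': 'ok'})))   # [] -> valid
for error in Validator(schema).iter_errors({'name': ''}):
    print(error.path[0], '->', error.message)                # name -> This field may not be blank.
```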
commit: fe8221e398bb9a1ddabf08002441acb37dfef515
old_file / new_file: scripts/release_test/arguments.py
subject / message: Improve help messages from release_test
lang: Python | license: mit

old_contents:

```python
import argparse, common, sys, tests
from features import check_features, get_features


def arguments(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'tests', nargs='*', help='The list of tests to run')
    parser.add_argument(
        '--features', '-f', default=[], action='append',
        help='A list of features separated by colons')

    args = parser.parse_args(argv)

    if args.tests:
        all_tests = [(t, getattr(tests, t, None)) for t in args.tests]
        bad_tests = [t for (t, a) in all_tests if a is None]
        if bad_tests:
            raise ValueError('Bad test names: ' + ', '.join(bad_tests))
        all_tests = tuple(a for (t, a) in all_tests)
    else:
        all_tests = tests.__all__

    if args.features:
        features = set(':'.join(args.features).split(':'))
        check_features(features)
    else:
        features = get_features()

    return all_tests, features


if __name__ == '__main__':
    common.printer(arguments())
```

new_contents (the help strings now list the available tests and features):

```python
import argparse, common, sys, tests
from features import check_features, get_features, FEATURES


def arguments(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser()

    names = [t.__name__.split('.')[1] for t in tests.__all__]
    names = ', '.join(names)
    parser.add_argument(
        'tests', nargs='*',
        help='The list of tests to run. Tests are:' + names)

    features = ', '.join(FEATURES)
    parser.add_argument(
        '--features', '-f', default=[], action='append',
        help='A list of features separated by colons. Features are: ' +
        features)

    args = parser.parse_args(argv)

    if args.tests:
        all_tests = [(t, getattr(tests, t, None)) for t in args.tests]
        bad_tests = [t for (t, a) in all_tests if a is None]
        if bad_tests:
            raise ValueError('Bad test names: ' + ', '.join(bad_tests))
        all_tests = tuple(a for (t, a) in all_tests)
    else:
        all_tests = tests.__all__

    if args.features:
        features = set(':'.join(args.features).split(':'))
        check_features(features)
    else:
        features = get_features()

    return all_tests, features


if __name__ == '__main__':
    common.printer(arguments())
```

repos:
ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel
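The improvement is purely in discoverability: the `-h` output is now generated from the live registries (`tests.__all__` and `FEATURES`) instead of being a static string, so the help text can never drift out of sync with what the script accepts. A stand-alone sketch of the pattern, with invented stand-ins for the BiblioPixel test and feature registries (one nit carried over from the patch itself: `'Tests are:' + names` omits the space after the colon):

```python
import argparse

# Stand-ins for tests.__all__ and features.FEATURES in the record above.
TESTS = {'simple': None, 'matrix': None}
FEATURES = ['audio', 'browser']

parser = argparse.ArgumentParser()
parser.add_argument('tests', nargs='*',
                    help='The list of tests to run. Tests are: ' + ', '.join(TESTS))
parser.add_argument('--features', '-f', default=[], action='append',
                    help='A list of features separated by colons. '
                         'Features are: ' + ', '.join(FEATURES))

args = parser.parse_args(['simple', '-f', 'audio:browser'])
features = set(':'.join(args.features).split(':'))
print(args.tests, sorted(features))   # ['simple'] ['audio', 'browser']
```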
commit: 95d1bf068ebf2f57eaf44accbe15aa30d236d8ea
old_file / new_file: astropy/coordinates/tests/test_distance.py
subject / message: Add test for applying a distance to a coordinate via a quantity
lang: Python | license: bsd-3-clause

old_contents:

```python
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function

from numpy import testing as npt

from ... import units as u

"""
This includes tests for distances/cartesian points that are *not* in the API
tests. Right now that's just regression tests.
"""


def test_distance_change():
    from .. import RA, Dec, ICRSCoordinates, Distance

    ra = RA("4:08:15.162342", unit=u.hour)
    dec = Dec("-41:08:15.162342", unit=u.degree)
    c = ICRSCoordinates(ra, dec)

    c.distance = Distance(1, unit=u.kpc)

    oldx = c.x
    assert (oldx - 0.35284083171901953) < 1e-10

    #now x should increase when the distance increases
    c.distance = Distance(2, unit=u.kpc)
    assert c.x == oldx * 2
```

new_contents (adds `test_distance_from_quantity`):

```python
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function

from numpy import testing as npt

from ... import units as u

"""
This includes tests for distances/cartesian points that are *not* in the API
tests. Right now that's just regression tests.
"""


def test_distance_change():
    from .. import RA, Dec, ICRSCoordinates, Distance

    ra = RA("4:08:15.162342", unit=u.hour)
    dec = Dec("-41:08:15.162342", unit=u.degree)
    c = ICRSCoordinates(ra, dec)

    c.distance = Distance(1, unit=u.kpc)

    oldx = c.x
    assert (oldx - 0.35284083171901953) < 1e-10

    #now x should increase when the distance increases
    c.distance = Distance(2, unit=u.kpc)
    assert c.x == oldx * 2


def test_distance_from_quantity():
    from .. import RA, Dec, ICRSCoordinates, Distance

    ra = RA("4:08:15.162342", unit=u.hour)
    dec = Dec("-41:08:15.162342", unit=u.degree)
    c = ICRSCoordinates(ra, dec)

    # a Quantity object should be able to supply a distance
    q = 2 * u.kpc
    c.distance = q
```

repos:
pllim/astropy,astropy/astropy,dhomeier/astropy,dhomeier/astropy,bsipocz/astropy,pllim/astropy,astropy/astropy,pllim/astropy,tbabej/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,larrybradley/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,tbabej/astropy,StuartLittlefair/astropy,kelle/astropy,MSeifert04/astropy,saimn/astropy,tbabej/astropy,funbaker/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,DougBurke/astropy,mhvk/astropy,joergdietrich/astropy,joergdietrich/astropy,StuartLittlefair/astropy,dhomeier/astropy,pllim/astropy,lpsinger/astropy,larrybradley/astropy,stargaser/astropy,DougBurke/astropy,astropy/astropy,lpsinger/astropy,tbabej/astropy,mhvk/astropy,tbabej/astropy,saimn/astropy,pllim/astropy,bsipocz/astropy,astropy/astropy,mhvk/astropy,aleksandr-bakanov/astropy,saimn/astropy,MSeifert04/astropy,joergdietrich/astropy,saimn/astropy,StuartLittlefair/astropy,dhomeier/astropy,kelle/astropy,aleksandr-bakanov/astropy,funbaker/astropy,AustereCuriosity/astropy,MSeifert04/astropy,larrybradley/astropy,kelle/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,stargaser/astropy,stargaser/astropy,funbaker/astropy,stargaser/astropy,larrybradley/astropy,DougBurke/astropy,joergdietrich/astropy,saimn/astropy,kelle/astropy,bsipocz/astropy,funbaker/astropy,mhvk/astropy,DougBurke/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,astropy/astropy,lpsinger/astropy,dhomeier/astropy,kelle/astropy,MSeifert04/astropy,lpsinger/astropy,lpsinger/astropy,mhvk/astropy,bsipocz/astropy
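The new test exercises the distance setter with a bare Quantity instead of a Distance instance. The fact it relies on is that multiplying a number by a unit yields an astropy Quantity carrying both value and unit, which the setter is then expected to accept. A small sketch of that Quantity arithmetic, runnable against current astropy even though RA/Dec/ICRSCoordinates belong to that era's coordinates API:

```python
from astropy import units as u

# value * unit produces a Quantity; this is what `c.distance = q` receives.
q = 2 * u.kpc
print(type(q).__name__)   # Quantity
print(q.value, q.unit)    # 2.0 kpc
print(q.to(u.pc))         # 2000.0 pc
```

In modern astropy the same idea is spelled `SkyCoord(ra=..., dec=..., distance=2 * u.kpc)`.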
commit: 1975aeb06a85d8983a3815ffd89076af66d61561
old_file / new_file: payments/urls.py
subject / message: Use imported views instead of lazy import
lang: Python | license: mit

old_contents:

```python
from django.conf.urls import patterns, url

try:
    from account.decorators import login_required
except ImportError:
    from django.contrib.auth.decorators import login_required

from .views import (
    CancelView,
    ChangeCardView,
    ChangePlanView,
    HistoryView,
    SubscribeView
)


urlpatterns = patterns(
    "payments.views",
    url(r"^webhook/$", "webhook", name="payments_webhook"),
    url(r"^a/subscribe/$", "subscribe", name="payments_ajax_subscribe"),
    url(r"^a/change/card/$", "change_card", name="payments_ajax_change_card"),
    url(r"^a/change/plan/$", "change_plan", name="payments_ajax_change_plan"),
    url(r"^a/cancel/$", "cancel", name="payments_ajax_cancel"),
    url(
        r"^subscribe/$",
        login_required(SubscribeView.as_view()),
        name="payments_subscribe"
    ),
    url(
        r"^change/card/$",
        login_required(ChangeCardView.as_view()),
        name="payments_change_card"
    ),
    url(
        r"^change/plan/$",
        login_required(ChangePlanView.as_view()),
        name="payments_change_plan"
    ),
    url(
        r"^cancel/$",
        login_required(CancelView.as_view()),
        name="payments_cancel"
    ),
    url(
        r"^history/$",
        login_required(HistoryView.as_view()),
        name="payments_history"
    ),
)
```

new_contents (imports the view callables and drops the `"payments.views"` string prefix):

```python
from django.conf.urls import patterns, url

try:
    from account.decorators import login_required
except ImportError:
    from django.contrib.auth.decorators import login_required

from .views import (
    CancelView,
    ChangeCardView,
    ChangePlanView,
    HistoryView,
    SubscribeView,
    webhook,
    subscribe,
    change_card,
    change_plan,
    cancel
)


urlpatterns = patterns(
    "",
    url(r"^webhook/$", webhook, name="payments_webhook"),
    url(r"^a/subscribe/$", subscribe, name="payments_ajax_subscribe"),
    url(r"^a/change/card/$", change_card, name="payments_ajax_change_card"),
    url(r"^a/change/plan/$", change_plan, name="payments_ajax_change_plan"),
    url(r"^a/cancel/$", cancel, name="payments_ajax_cancel"),
    url(
        r"^subscribe/$",
        login_required(SubscribeView.as_view()),
        name="payments_subscribe"
    ),
    url(
        r"^change/card/$",
        login_required(ChangeCardView.as_view()),
        name="payments_change_card"
    ),
    url(
        r"^change/plan/$",
        login_required(ChangePlanView.as_view()),
        name="payments_change_plan"
    ),
    url(
        r"^cancel/$",
        login_required(CancelView.as_view()),
        name="payments_cancel"
    ),
    url(
        r"^history/$",
        login_required(HistoryView.as_view()),
        name="payments_history"
    ),
)
```

repos:
pinax/django-stripe-payments
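Referencing the imported callables means a misspelled view name now fails at import time rather than at request time, and the empty prefix replaces the `"payments.views"` module string that the lazy lookup needed. Since `patterns()` and string view arguments were deprecated in Django 1.8 and removed in 1.10, here is a sketch of the same routes in the modern list style (`myapp.views` is a hypothetical module path):

```python
from django.urls import path

from myapp.views import SubscribeView, webhook  # hypothetical import

urlpatterns = [
    # Plain callables: a bad reference raises ImportError when urls.py loads.
    path("webhook/", webhook, name="payments_webhook"),
    path("subscribe/", SubscribeView.as_view(), name="payments_subscribe"),
]
```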
commit: d85a288cbacf6bc31b1d544dd269d392aed4a1ec
old_file / new_file: openquake/hazardlib/general.py
subject / message: Add stderr redirect to git_suffix to get more clean messages
lang: Python | license: agpl-3.0

old_contents:

```python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

import os
import subprocess


def git_suffix(fname):
    """
    :returns: `<short git hash>` if Git repository found
    """
    try:
        gh = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'],
                                     cwd=os.path.dirname(fname)).strip()
        gh = "-git" + gh if gh else ''
        return gh
    except:
        # trapping everything on purpose; git may not be installed or it
        # may not work properly
        return ''
```

new_contents (redirects `git`'s stderr to the null device):

```python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

import os
import subprocess


def git_suffix(fname):
    """
    :returns: `<short git hash>` if Git repository found
    """
    try:
        gh = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'],
                                     stderr=open(os.devnull, 'w'),
                                     cwd=os.path.dirname(fname)).strip()
        gh = "-git" + gh if gh else ''
        return gh
    except:
        # trapping everything on purpose; git may not be installed or it
        # may not work properly
        return ''
```

repos:
larsbutler/oq-hazardlib,rcgee/oq-hazardlib,gem/oq-engine,silviacanessa/oq-hazardlib,rcgee/oq-hazardlib,gem/oq-hazardlib,gem/oq-engine,g-weatherill/oq-hazardlib,larsbutler/oq-hazardlib,silviacanessa/oq-hazardlib,silviacanessa/oq-hazardlib,silviacanessa/oq-hazardlib,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,g-weatherill/oq-hazardlib,gem/oq-engine,gem/oq-hazardlib,vup1120/oq-hazardlib,larsbutler/oq-hazardlib,gem/oq-engine,mmpagani/oq-hazardlib,mmpagani/oq-hazardlib,vup1120/oq-hazardlib,g-weatherill/oq-hazardlib,mmpagani/oq-hazardlib,gem/oq-engine,gem/oq-hazardlib
def git_suffix(fname):
"""
:returns: `<short git hash>` if Git repository found
"""
try:
gh = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'],
stderr=open(os.devnull, 'w'), cwd=os.path.dirname(fname)).strip()
gh = "-git" + gh if gh else ''
return gh
except:
# trapping everything on purpose; git may not be installed or it
# may not work properly
return ''
|
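The stderr redirect in this record only silences git's error chatter; the hash lookup itself is unchanged. Below is a minimal runnable sketch of the same pattern, assuming Python 3.3+ where subprocess.DEVNULL replaces the open(os.devnull, 'w') handle (which the original never closes) — an illustration, not the project's code:
import os
import subprocess
def quiet_git_hash(path):
    # Ask git for the short HEAD hash; stderr goes to DEVNULL so a
    # "fatal: not a git repository" complaint never reaches the terminal.
    try:
        out = subprocess.check_output(
            ['git', 'rev-parse', '--short', 'HEAD'],
            stderr=subprocess.DEVNULL,
            cwd=os.path.dirname(os.path.abspath(path)),
        )
        return out.decode().strip()
    except (OSError, subprocess.CalledProcessError):
        # git may be missing, or the directory may not be a checkout
        return ''
print(quiet_git_hash(__file__))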
32508dea4ef00fb54919c0260b7ba2902835faf5
|
prepareupload.py
|
prepareupload.py
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
sys.stdout.flush()
sys.stdout.write("\r{0} parsed. \n".format(COUNT))
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
Print messages for prepare upload.
|
Print messages for prepare upload.
|
Python
|
bsd-3-clause
|
OLRC/SwiftBulkUploader,cudevmaxwell/SwiftBulkUploader
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
Print messages for prepare upload.
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
sys.stdout.flush()
sys.stdout.write("\r{0} parsed. \n".format(COUNT))
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
<commit_before>import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
<commit_msg>Print messages for prepare upload.<commit_after>
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
sys.stdout.flush()
sys.stdout.write("\r{0} parsed. \n".format(COUNT))
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
Print messages for prepare upload.
import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all it's files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
sys.stdout.flush()
sys.stdout.write("\r{0} parsed. \n".format(COUNT))
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
<commit_before>import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
<commit_msg>Print messages for prepare upload.<commit_after>import sys
import olrcdb
import os
# Globals
COUNT = 0
class FileParser(object):
'''Object used to parse through a directory for all its files. Collects
the paths of all the files and stores a record of these in a new table in
the database.
The Schema of the database is:
NewTable(path, uploaded=false)
'''
def __init__(self, directory, table_name):
self.directory = directory
self.table_name = table_name
def prepare_upload(connect, directory, table_name):
'''Given a database connection, directory and table_name,
-Create the table in the database
-populate the table with (path, uploaded=false)
where each path is a file in the given directory.'''
global COUNT
for filename in os.listdir(directory):
file_path = os.path.join(directory, filename)
# Add file name to the list.
if os.path.isfile(file_path):
connect.insert_path(file_path, table_name)
COUNT += 1
sys.stdout.flush()
sys.stdout.write("\r{0} parsed. \n".format(COUNT))
else:
prepare_upload(connect, file_path, table_name)
if __name__ == "__main__":
# Check for proper parameters
if len(sys.argv) != 3:
sys.stderr.write(
'Usage: python prepareupload.py path-to-drive table-name\n'
)
sys.exit(1)
connect = olrcdb.DatabaseConnection()
connect.create_table(sys.argv[2])
prepare_upload(connect, sys.argv[1], sys.argv[2])
|
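Note that the progress printing added in this commit flushes before writing and keeps a trailing "\n", so every count lands on its own line. A hedged sketch of the conventional carriage-return variant — write first, flush after, newline only at the end — assuming a plain terminal; the loop below merely stands in for the directory walk:
import sys
import time
count = 0
for _ in range(5):  # stand-in for walking a real drive with os.listdir()
    count += 1
    sys.stdout.write("\r{0} parsed.".format(count))  # '\r' rewinds to column 0
    sys.stdout.flush()  # flush *after* writing so the count appears immediately
    time.sleep(0.1)
sys.stdout.write("\n")  # single newline once the walk finishes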
66b20aa7fbd322a051ab7ae26ecd8c46f7605763
|
ptoolbox/tags.py
|
ptoolbox/tags.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
TAG_ORIENTATION = 'Image Orientation'
# XXX: this is a terrible way to retrieve the orientations. Exifread regretfully does not
# get back raw EXIF orientations, and no other library is available on pip as of today.
ORIENTATIONS = [
'Horizontal (normal)',
'Mirrored horizontal',
'Rotated 180',
'Mirrored vertical',
'Mirrored horizontal then rotated 90 CCW',
'Rotated 90 CCW',
'Mirrored horizontal then rotated 90 CW',
'Rotated 90 CW',
]
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
def parse_orientation(tags):
tag = tags.get(TAG_ORIENTATION, None)
if not tag:
raise KeyError(TAG_ORIENTATION)
return ORIENTATIONS.index(str(tag)) + 1 # XXX: convert back to original EXIF orientation
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
Remove orientation tag parsing, not needed.
|
Remove orientation tag parsing, not needed.
|
Python
|
mit
|
vperron/picasa-toolbox
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
TAG_ORIENTATION = 'Image Orientation'
# XXX: this is a terrible way to retrieve the orientations. Exifread regretfully does not
# get back raw EXIF orientations, and no other library is available on pip as of today.
ORIENTATIONS = [
'Horizontal (normal)',
'Mirrored horizontal',
'Rotated 180',
'Mirrored vertical',
'Mirrored horizontal then rotated 90 CCW',
'Rotated 90 CCW',
'Mirrored horizontal then rotated 90 CW',
'Rotated 90 CW',
]
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
def parse_orientation(tags):
tag = tags.get(TAG_ORIENTATION, None)
if not tag:
raise KeyError(TAG_ORIENTATION)
return ORIENTATIONS.index(str(tag)) + 1 # XXX: convert back to original EXIF orientation
Remove orientation tag parsing, not needed.
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
<commit_before># -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
TAG_ORIENTATION = 'Image Orientation'
# XXX: this is a terrible way to retrieve the orientations. Exifread regretfully does not
# get back raw EXIF orientations, and no other library is available on pip as of today.
ORIENTATIONS = [
'Horizontal (normal)',
'Mirrored horizontal',
'Rotated 180',
'Mirrored vertical',
'Mirrored horizontal then rotated 90 CCW',
'Rotated 90 CCW',
'Mirrored horizontal then rotated 90 CW',
'Rotated 90 CW',
]
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
def parse_orientation(tags):
tag = tags.get(TAG_ORIENTATION, None)
if not tag:
raise KeyError(TAG_ORIENTATION)
return ORIENTATIONS.index(str(tag)) + 1 # XXX: convert back to original EXIF orientation
<commit_msg>Remove orientation tag parsing, not needed.<commit_after>
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
TAG_ORIENTATION = 'Image Orientation'
# XXX: this is a terrible way to retrieve the orientations. Exifread regretfully does not
# get back raw EXIF orientations, and no other library is available on pip as of today.
ORIENTATIONS = [
'Horizontal (normal)',
'Mirrored horizontal',
'Rotated 180',
'Mirrored vertical',
'Mirrored horizontal then rotated 90 CCW',
'Rotated 90 CCW',
'Mirrored horizontal then rotated 90 CW',
'Rotated 90 CW',
]
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
def parse_orientation(tags):
tag = tags.get(TAG_ORIENTATION, None)
if not tag:
raise KeyError(TAG_ORIENTATION)
return ORIENTATIONS.index(str(tag)) + 1 # XXX: convert back to original EXIF orientation
Remove orientation tag parsing, not needed.
# -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
<commit_before># -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
TAG_ORIENTATION = 'Image Orientation'
# XXX: this is a terrible way to retrieve the orientations. Exifread regretfully does not
# get back raw EXIF orientations, and no other library is available on pip as of today.
ORIENTATIONS = [
'Horizontal (normal)',
'Mirrored horizontal',
'Rotated 180',
'Mirrored vertical',
'Mirrored horizontal then rotated 90 CCW',
'Rotated 90 CCW',
'Mirrored horizontal then rotated 90 CW',
'Rotated 90 CW',
]
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
def parse_orientation(tags):
tag = tags.get(TAG_ORIENTATION, None)
if not tag:
raise KeyError(TAG_ORIENTATION)
return ORIENTATIONS.index(str(tag)) + 1 # XXX: convert back to original EXIF orientation
<commit_msg>Remove orientation tag parsing, not needed.<commit_after># -*- coding: utf-8 -*-
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_HEIGHT = 'EXIF ExifImageLength'
TAG_DATETIME = 'Image DateTime'
def parse_time(tags):
tag = tags.get(TAG_DATETIME, None)
if not tag:
raise KeyError(TAG_DATETIME)
return datetime.strptime(str(tag), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
tag = tags.get(TAG_WIDTH, None)
if not tag:
raise KeyError(TAG_WIDTH)
return int(str(tag), 10)
def parse_height(tags):
tag = tags.get(TAG_HEIGHT, None)
if not tag:
raise KeyError(TAG_HEIGHT)
return int(str(tag), 10)
|
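The three surviving parsers in this record share one get-or-raise guard. Below is a small refactor sketch — an assumption about where the module could go next, not code from the repository — that keeps the behaviour while stating the pattern once:
from datetime import datetime
TAG_WIDTH = 'EXIF ExifImageWidth'
TAG_DATETIME = 'Image DateTime'
def _require(tags, key):
    # Shared guard: exifread hands back tag objects, so stringify on the way out.
    tag = tags.get(key)
    if not tag:
        raise KeyError(key)
    return str(tag)
def parse_time(tags):
    return datetime.strptime(_require(tags, TAG_DATETIME), "%Y:%m:%d %H:%M:%S")
def parse_width(tags):
    return int(_require(tags, TAG_WIDTH), 10)
print(parse_time({TAG_DATETIME: '2014:07:01 12:30:05'}))  # demo with a fake tag dict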
ad3a495e38e22f3759a724a23ce0492cd42e0bc4
|
qual/calendar.py
|
qual/calendar.py
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return DateWithCalendar(JulianCalendar, d)
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
Use from_date to construct from year, month, day.
|
Use from_date to construct from year, month, day.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return DateWithCalendar(JulianCalendar, d)
Use from_date to construct from year, month, day.
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
<commit_before>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return DateWithCalendar(JulianCalendar, d)
<commit_msg>Use from_date to construct from year, month, day.<commit_after>
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return DateWithCalendar(JulianCalendar, d)
Use from_date to construct from year, month, day.
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
<commit_before>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return DateWithCalendar(JulianCalendar, d)
<commit_msg>Use from_date to construct from year, month, day.<commit_after>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
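Routing JulianCalendar.date through from_date matters because from_date tags the result with self.__class__, so every subclass stamps dates with its own type instead of a hard-coded one. A condensed sketch of that mechanism (the fixed ten-day offset is copied from the record and only holds for dates near its era):
from datetime import date, timedelta
class DateWithCalendar(object):
    def __init__(self, calendar_class, d):
        self.calendar = calendar_class
        self.date = d
class Calendar(object):
    def from_date(self, d):
        # self.__class__ is the *runtime* subclass, not Calendar itself
        return DateWithCalendar(self.__class__, d)
class JulianCalendar(Calendar):
    def date(self, year, month, day):
        return self.from_date(date(year, month, day) + timedelta(days=10))
d = JulianCalendar().date(1582, 10, 4)
assert d.calendar is JulianCalendar  # tagged with the subclass, as intended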
e5a392c5c0e5a3c1e71764a422cbea16d81ba3a6
|
app.py
|
app.py
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
Remove '.wav' addition to all files uploaded
|
Remove '.wav' addition to all files uploaded
|
Python
|
mit
|
spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
Remove '.wav' addition to all files uploaded
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
<commit_before>from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
<commit_msg>Remove '.wav' addition to all files uploaded<commit_after>
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
Remove '.wav' addition to all files uploaded
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
<commit_before>from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
<commit_msg>Remove '.wav' addition to all files uploaded<commit_after>from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('', 'index.html')
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
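A hypothetical client for the /upload route above, assuming the app runs on Flask's default host and port and that 'sound.wav' is a placeholder path on disk; the multipart field name must match request.files["sound"]:
import requests  # third-party; pip install requests
with open('sound.wav', 'rb') as fh:
    resp = requests.post('http://localhost:5000/upload', files={'sound': fh})
print(resp.text.strip())  # the server answers with the bare UUID filename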
4303a5cb38f2252dfe09a0ca21320d4bd67bd966
|
byceps/blueprints/user/current/forms.py
|
byceps/blueprints/user/current/forms.py
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = StringField('Telefonnummer', [Optional(), Length(max=20)])
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.fields.html5 import TelField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = TelField('Telefonnummer', [Optional(), Length(max=20)])
|
Use `<input type="tel">` for phone number field
|
Use `<input type="tel">` for phone number field
|
Python
|
bsd-3-clause
|
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = StringField('Telefonnummer', [Optional(), Length(max=20)])
Use `<input type="tel">` for phone number field
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.fields.html5 import TelField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = TelField('Telefonnummer', [Optional(), Length(max=20)])
|
<commit_before>"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = StringField('Telefonnummer', [Optional(), Length(max=20)])
<commit_msg>Use `<input type="tel">` for phone number field<commit_after>
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.fields.html5 import TelField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = TelField('Telefonnummer', [Optional(), Length(max=20)])
|
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = StringField('Telefonnummer', [Optional(), Length(max=20)])
Use `<input type="tel">` for phone number field
"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.fields.html5 import TelField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = TelField('Telefonnummer', [Optional(), Length(max=20)])
|
<commit_before>"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = StringField('Telefonnummer', [Optional(), Length(max=20)])
<commit_msg>Use `<input type="tel">` for phone number field<commit_after>"""
byceps.blueprints.user.current.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from wtforms import DateField, StringField
from wtforms.fields.html5 import TelField
from wtforms.validators import InputRequired, Length, Optional
from ....util.l10n import LocalizedForm
class DetailsForm(LocalizedForm):
first_names = StringField('Vorname(n)', [InputRequired(), Length(min=2)])
last_name = StringField('Nachname', [InputRequired(), Length(min=2, max=80)])
date_of_birth = DateField('Geburtsdatum',
[Optional()],
format='%d.%m.%Y')
country = StringField('Land', [Optional(), Length(max=60)])
zip_code = StringField('PLZ', [Optional()])
city = StringField('Stadt', [Optional()])
street = StringField('Straße', [Optional()])
phone_number = TelField('Telefonnummer', [Optional(), Length(max=20)])
|
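What the field swap buys: TelField renders <input type="tel">, which mobile browsers answer with a phone keypad, while server-side validation stays identical to StringField. A minimal check, assuming the WTForms 2.x import path used in this record (WTForms 3 moved the field to the top-level wtforms namespace):
from wtforms import Form
from wtforms.fields.html5 import TelField
class Demo(Form):
    phone = TelField('Telefonnummer')
print(Demo().phone())  # renders roughly: <input id="phone" name="phone" type="tel" value="">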
333aa4499e0118eebaa6ce72e97aef5f9e701865
|
skyfield/tests/test_magnitudes.py
|
skyfield/tests/test_magnitudes.py
|
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert [f'{m:.3f}' for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
|
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert ['%.3f' % m for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
|
Fix magnitudes test for Python 2.7 in CI
|
Fix magnitudes test for Python 2.7 in CI
|
Python
|
mit
|
skyfielders/python-skyfield,skyfielders/python-skyfield
|
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert [f'{m:.3f}' for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
Fix magnitudes test for Python 2.7 in CI
|
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert ['%.3f' % m for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
|
<commit_before>from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert [f'{m:.3f}' for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
<commit_msg>Fix magnitudes test for Python 2.7 in CI<commit_after>
|
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert ['%.3f' % m for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
|
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert [f'{m:.3f}' for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
Fix magnitudes test for Python 2.7 in CI
from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert ['%.3f' % m for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
|
<commit_before>from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert [f'{m:.3f}' for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
<commit_msg>Fix magnitudes test for Python 2.7 in CI<commit_after>from skyfield.api import load
from skyfield.magnitudelib import planetary_magnitude
def test_magnitudes():
ts = load.timescale()
#t = ts.utc(1995, 5, 22) # Rings edge-on from Earth.
t = ts.utc(2021, 10, 4)
eph = load('de421.bsp')
names = [
'mercury', 'venus', 'mars', 'jupiter barycenter',
'saturn barycenter', 'uranus barycenter', 'neptune barycenter',
]
e = eph['earth'].at(t)
positions = [e.observe(eph[name]) for name in names]
magnitudes = [planetary_magnitude(position) for position in positions]
assert ['%.3f' % m for m in magnitudes] == [
'2.393', '-4.278', '1.592', '-2.693', '0.508', '5.701', '7.690',
]
|
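Why the change in this record works: %-style formatting is valid on both Python 2.7 and 3.x, whereas f-strings are a SyntaxError before Python 3.6, so CI running 2.7 cannot even import the f-string version. A minimal illustration, using one of the expected magnitude values from the test above:

m = 2.3931
assert '%.3f' % m == '2.393'   # runs on Python 2.7 and 3.x alike
# f'{m:.3f}' yields the same string, but only parses on Python >= 3.6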
71c9235a7e48882fc8c1393e9527fea4531c536c
|
filter_plugins/fap.py
|
filter_plugins/fap.py
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
class FilterModule(object):
def filters(self):
return {"site_code": site_code}
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
if "https://" in repo:
repo = repo.replace("https://", "")
print(repo)
type_, address, *_ = repo.split(":")
(r, *_) = address.split("/")
return "_".join([type_, r]).replace(".", "_")
elif ":" not in repo:
# Most likely a file path
type_ = "disk"
path = list(filter(None, repo.split("/")))
if path[0] == "Volumes":
return "_".join([type_, path[1]])
return "_".join([type_, repo.replace("/", "_")])
else:
type_, *rest = repo.split(":")
return "_".join([type_, rest[0]])
class FilterModule(object):
def filters(self):
return {
"site_code": site_code,
"restic_repo_friendly_name": restic_repo_friendly_name,
}
|
Add really hacky way to reformat restic repos
|
Add really hacky way to reformat restic repos
|
Python
|
mit
|
kradalby/plays,kradalby/plays
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
class FilterModule(object):
def filters(self):
return {"site_code": site_code}
Add really hacky way to reformat restic repos
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
if "https://" in repo:
repo = repo.replace("https://", "")
print(repo)
type_, address, *_ = repo.split(":")
(r, *_) = address.split("/")
return "_".join([type_, r]).replace(".", "_")
elif ":" not in repo:
# Most likely a file path
type_ = "disk"
path = list(filter(None, repo.split("/")))
if path[0] == "Volumes":
return "_".join([type_, path[1]])
return "_".join([type_, repo.replace("/", "_")])
else:
type_, *rest = repo.split(":")
return "_".join([type_, rest[0]])
class FilterModule(object):
def filters(self):
return {
"site_code": site_code,
"restic_repo_friendly_name": restic_repo_friendly_name,
}
|
<commit_before>#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
class FilterModule(object):
def filters(self):
return {"site_code": site_code}
<commit_msg>Add really hacky way to reformat restic repos<commit_after>
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
if "https://" in repo:
repo = repo.replace("https://", "")
print(repo)
type_, address, *_ = repo.split(":")
(r, *_) = address.split("/")
return "_".join([type_, r]).replace(".", "_")
elif ":" not in repo:
# Most likely a file path
type_ = "disk"
path = list(filter(None, repo.split("/")))
if path[0] == "Volumes":
return "_".join([type_, path[1]])
return "_".join([type_, repo.replace("/", "_")])
else:
type_, *rest = repo.split(":")
return "_".join([type_, rest[0]])
class FilterModule(object):
def filters(self):
return {
"site_code": site_code,
"restic_repo_friendly_name": restic_repo_friendly_name,
}
|
#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
class FilterModule(object):
def filters(self):
return {"site_code": site_code}
Add really hacky way to reformat restic repos#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
if "https://" in repo:
repo = repo.replace("https://", "")
print(repo)
type_, address, *_ = repo.split(":")
(r, *_) = address.split("/")
return "_".join([type_, r]).replace(".", "_")
elif ":" not in repo:
# Most likely a file path
type_ = "disk"
path = list(filter(None, repo.split("/")))
if path[0] == "Volumes":
return "_".join([type_, path[1]])
return "_".join([type_, repo.replace("/", "_")])
else:
type_, *rest = repo.split(":")
return "_".join([type_, rest[0]])
class FilterModule(object):
def filters(self):
return {
"site_code": site_code,
"restic_repo_friendly_name": restic_repo_friendly_name,
}
|
<commit_before>#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
class FilterModule(object):
def filters(self):
return {"site_code": site_code}
<commit_msg>Add really hacky way to reformat restic repos<commit_after>#!/usr/bin/python
import ipaddress
def site_code(ipv4):
# Verify IP address
_ = ipaddress.ip_address(ipv4)
segments = ipv4.split(".")
return int(segments[1])
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
if "https://" in repo:
repo = repo.replace("https://", "")
print(repo)
type_, address, *_ = repo.split(":")
(r, *_) = address.split("/")
return "_".join([type_, r]).replace(".", "_")
elif ":" not in repo:
# Most likely a file path
type_ = "disk"
path = list(filter(None, repo.split("/")))
if path[0] == "Volumes":
return "_".join([type_, path[1]])
return "_".join([type_, repo.replace("/", "_")])
else:
type_, *rest = repo.split(":")
return "_".join([type_, rest[0]])
class FilterModule(object):
def filters(self):
return {
"site_code": site_code,
"restic_repo_friendly_name": restic_repo_friendly_name,
}
|
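Tracing the three repo formats listed in the function's own comments through restic_repo_friendly_name gives the following (derived by hand from the code above, not taken from the repository's tests; note the stray print(repo) fires on the first call):

assert restic_repo_friendly_name(
    'rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no'
) == 'rest_restic_storage_tjoda_fap_no'
assert restic_repo_friendly_name('rclone:Jotta:storage.tjoda.fap.no') == 'rclone_Jotta'
assert restic_repo_friendly_name('/Volumes/storage/restic/kramacbook') == 'disk_storage'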
478bdba2edf9654fc90e1a50bad60cd5b89791f9
|
ueberwachungspaket/decorators.py
|
ueberwachungspaket/decorators.py
|
from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml import Response
from twilio.util import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = Response()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
|
from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml.voice_response import VoiceResponse
from twilio.request_validator import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = VoiceResponse()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
|
Make it compile with recent version of twilio
|
Make it compile with recent version of twilio
|
Python
|
mit
|
AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at
|
from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml import Response
from twilio.util import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = Response()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
Make it compile with recent version of twilio
|
from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml.voice_response import VoiceResponse
from twilio.request_validator import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = VoiceResponse()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
|
<commit_before>from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml import Response
from twilio.util import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = Response()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
<commit_msg>Make it compile with recent version of twilio<commit_after>
|
from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml.voice_response import VoiceResponse
from twilio.request_validator import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = VoiceResponse()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
|
from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml import Response
from twilio.util import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = Response()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
Make it compile with recent version of twiliofrom urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml.voice_response import VoiceResponse
from twilio.request_validator import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = VoiceResponse()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
|
<commit_before>from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml import Response
from twilio.util import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = Response()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
<commit_msg>Make it compile with recent version of twilio<commit_after>from urllib.parse import urlparse, urlunparse
from functools import wraps
from flask import abort, current_app, request
from twilio.twiml.voice_response import VoiceResponse
from twilio.request_validator import RequestValidator
from config import *
def twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
url = list(urlparse(request.url))
url[1] = url[1].encode("idna").decode("utf-8")
url = urlunparse(url)
signature = request.headers.get("X-TWILIO-SIGNATURE", "")
request_valid = validator.validate(url, request.form, signature)
if request_valid or current_app.debug:
resp = VoiceResponse()
f(resp, *args, **kwargs)
return str(resp)
else:
return abort(403)
return decorated_function
|
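The diff in this record is the twilio 5.x to 6.x rename: Response and RequestValidator moved to twilio.twiml.voice_response.VoiceResponse and twilio.request_validator.RequestValidator. If both library generations genuinely had to be supported, one possible shim (a sketch; both import paths are taken verbatim from the diff above) would be:

try:
    from twilio.twiml.voice_response import VoiceResponse  # twilio >= 6
    from twilio.request_validator import RequestValidator
except ImportError:
    from twilio.twiml import Response as VoiceResponse     # legacy twilio 5.x
    from twilio.util import RequestValidator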
e9ca748641e04f63944521ef0bc3090960f77cab
|
deployment/datapusher_settings.py
|
deployment/datapusher_settings.py
|
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/job_store.db'
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
#LOG_FILE = '/tmp/ckan_service.log'
STDERR = True
|
import os
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# Webserver host and port
HOST = os.environ.get('DATAPUSHER_HOST', '0.0.0.0')
PORT = os.environ.get('DATAPUSHER_PORT', 8800)
# Database
SQLALCHEMY_DATABASE_URI = os.environ.get('DATAPUSHER_SQLALCHEMY_DATABASE_URI', 'sqlite:////tmp/job_store.db')
# Download and streaming settings
MAX_CONTENT_LENGTH = int(os.environ.get('DATAPUSHER_MAX_CONTENT_LENGTH', '1024000'))
CHUNK_SIZE = int(os.environ.get('DATAPUSHER_CHUNK_SIZE', '16384'))
CHUNK_INSERT_ROWS = int(os.environ.get('DATAPUSHER_CHUNK_INSERT_ROWS', '250'))
DOWNLOAD_TIMEOUT = int(os.environ.get('DATAPUSHER_DOWNLOAD_TIMEOUT', '30'))
# Verify SSL
SSL_VERIFY = os.environ.get('DATAPUSHER_SSL_VERIFY', True)
# logging
#LOG_FILE = '/tmp/ckan_service.log'
|
Add missing config options, allow to define via env vars
|
Add missing config options, allow to define via env vars
|
Python
|
agpl-3.0
|
ckan/datapusher
|
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/job_store.db'
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
#LOG_FILE = '/tmp/ckan_service.log'
STDERR = True
Add missing config options, allow to define via env vars
|
import os
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# Webserver host and port
HOST = os.environ.get('DATAPUSHER_HOST', '0.0.0.0')
PORT = os.environ.get('DATAPUSHER_PORT', 8800)
# Database
SQLALCHEMY_DATABASE_URI = os.environ.get('DATAPUSHER_SQLALCHEMY_DATABASE_URI', 'sqlite:////tmp/job_store.db')
# Download and streaming settings
MAX_CONTENT_LENGTH = int(os.environ.get('DATAPUSHER_MAX_CONTENT_LENGTH', '1024000'))
CHUNK_SIZE = int(os.environ.get('DATAPUSHER_CHUNK_SIZE', '16384'))
CHUNK_INSERT_ROWS = int(os.environ.get('DATAPUSHER_CHUNK_INSERT_ROWS', '250'))
DOWNLOAD_TIMEOUT = int(os.environ.get('DATAPUSHER_DOWNLOAD_TIMEOUT', '30'))
# Verify SSL
SSL_VERIFY = os.environ.get('DATAPUSHER_SSL_VERIFY', True)
# logging
#LOG_FILE = '/tmp/ckan_service.log'
|
<commit_before>import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/job_store.db'
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
#LOG_FILE = '/tmp/ckan_service.log'
STDERR = True
<commit_msg>Add missing config options, allow to define via env vars<commit_after>
|
import os
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# Webserver host and port
HOST = os.environ.get('DATAPUSHER_HOST', '0.0.0.0')
PORT = os.environ.get('DATAPUSHER_PORT', 8800)
# Database
SQLALCHEMY_DATABASE_URI = os.environ.get('DATAPUSHER_SQLALCHEMY_DATABASE_URI', 'sqlite:////tmp/job_store.db')
# Download and streaming settings
MAX_CONTENT_LENGTH = int(os.environ.get('DATAPUSHER_MAX_CONTENT_LENGTH', '1024000'))
CHUNK_SIZE = int(os.environ.get('DATAPUSHER_CHUNK_SIZE', '16384'))
CHUNK_INSERT_ROWS = int(os.environ.get('DATAPUSHER_CHUNK_INSERT_ROWS', '250'))
DOWNLOAD_TIMEOUT = int(os.environ.get('DATAPUSHER_DOWNLOAD_TIMEOUT', '30'))
# Verify SSL
SSL_VERIFY = os.environ.get('DATAPUSHER_SSL_VERIFY', True)
# logging
#LOG_FILE = '/tmp/ckan_service.log'
|
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/job_store.db'
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
#LOG_FILE = '/tmp/ckan_service.log'
STDERR = True
Add missing config options, allow to define via env varsimport os
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# Webserver host and port
HOST = os.environ.get('DATAPUSHER_HOST', '0.0.0.0')
PORT = os.environ.get('DATAPUSHER_PORT', 8800)
# Database
SQLALCHEMY_DATABASE_URI = os.environ.get('DATAPUSHER_SQLALCHEMY_DATABASE_URI', 'sqlite:////tmp/job_store.db')
# Download and streaming settings
MAX_CONTENT_LENGTH = int(os.environ.get('DATAPUSHER_MAX_CONTENT_LENGTH', '1024000'))
CHUNK_SIZE = int(os.environ.get('DATAPUSHER_CHUNK_SIZE', '16384'))
CHUNK_INSERT_ROWS = int(os.environ.get('DATAPUSHER_CHUNK_INSERT_ROWS', '250'))
DOWNLOAD_TIMEOUT = int(os.environ.get('DATAPUSHER_DOWNLOAD_TIMEOUT', '30'))
# Verify SSL
SSL_VERIFY = os.environ.get('DATAPUSHER_SSL_VERIFY', True)
# logging
#LOG_FILE = '/tmp/ckan_service.log'
|
<commit_before>import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/job_store.db'
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
#LOG_FILE = '/tmp/ckan_service.log'
STDERR = True
<commit_msg>Add missing config options, allow to define via env vars<commit_after>import os
import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# Webserver host and port
HOST = os.environ.get('DATAPUSHER_HOST', '0.0.0.0')
PORT = os.environ.get('DATAPUSHER_PORT', 8800)
# Database
SQLALCHEMY_DATABASE_URI = os.environ.get('DATAPUSHER_SQLALCHEMY_DATABASE_URI', 'sqlite:////tmp/job_store.db')
# Download and streaming settings
MAX_CONTENT_LENGTH = int(os.environ.get('DATAPUSHER_MAX_CONTENT_LENGTH', '1024000'))
CHUNK_SIZE = int(os.environ.get('DATAPUSHER_CHUNK_SIZE', '16384'))
CHUNK_INSERT_ROWS = int(os.environ.get('DATAPUSHER_CHUNK_INSERT_ROWS', '250'))
DOWNLOAD_TIMEOUT = int(os.environ.get('DATAPUSHER_DOWNLOAD_TIMEOUT', '30'))
# Verify SSL
SSL_VERIFY = os.environ.get('DATAPUSHER_SSL_VERIFY', True)
# logging
#LOG_FILE = '/tmp/ckan_service.log'
|
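One subtlety in the settings file above: os.environ.get returns strings, so HOST, PORT, SQLALCHEMY_DATABASE_URI and SSL_VERIFY come back as str whenever the variable is set, while their defaults are int or bool; the int(...) wrappers on the download options avoid exactly this. A quick stdlib-only demonstration:

import os
os.environ['DATAPUSHER_PORT'] = '9000'
port = os.environ.get('DATAPUSHER_PORT', 8800)
print(type(port).__name__)  # 'str' when the variable is set, 'int' otherwise
os.environ['DATAPUSHER_SSL_VERIFY'] = 'false'
print(bool(os.environ.get('DATAPUSHER_SSL_VERIFY', True)))  # True: any non-empty string is truthy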
417415283d87654b066c11d807516d3cd5b5bf3d
|
tests/test_probabilistic_interleave_speed.py
|
tests/test_probabilistic_interleave_speed.py
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
Add tests for measuring the speed of probabilistic interleaving
|
Add tests for measuring the speed of probabilistic interleaving
|
Python
|
mit
|
mpkato/interleaving
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
Add tests for measuring the speed of probabilistic interleaving
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
<commit_before>import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
<commit_msg>Add tests for measuring the speed of probabilistic interleaving<commit_after>
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
Add tests for measuring the speed of probabilistic interleavingimport interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
<commit_before>import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(100, 200))
for i in range(1000):
method = il.Probabilistic([r1, r2])
ranking = method.interleave()
print(list(ranking))
<commit_msg>Add tests for measuring the speed of probabilistic interleaving<commit_after>import interleaving as il
import numpy as np
import pytest
np.random.seed(0)
from .test_methods import TestMethods
class TestProbabilisticInterleaveSpeed(TestMethods):
def test_interleave(self):
r1 = list(range(100))
r2 = list(range(50, 150))
r3 = list(range(100, 200))
r4 = list(range(150, 250))
for i in range(1000):
method = il.Probabilistic([r1, r2, r3, r4])
ranking = method.interleave()
method.evaluate(ranking, [0, 1, 2])
|
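Since the test in this record is a speed check that produces no timing output, a plain wall-clock wrapper is the simplest way to see the number it is guarding (a sketch assuming il, r1..r4 as defined in the test; pytest-benchmark would be the more idiomatic tool if it is available):

import time
start = time.perf_counter()
for i in range(1000):
    method = il.Probabilistic([r1, r2, r3, r4])
    ranking = method.interleave()
    method.evaluate(ranking, [0, 1, 2])
print('elapsed: %.2fs' % (time.perf_counter() - start))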
b8839af335757f58fa71916ff3394f5a6806165d
|
user_management/api/tests/test_exceptions.py
|
user_management/api/tests/test_exceptions.py
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as e:
raise InvalidExpiredToken
self.assertEqual(e.exception.status_code, HTTP_400_BAD_REQUEST)
message = e.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as error:
raise InvalidExpiredToken
self.assertEqual(error.exception.status_code, HTTP_400_BAD_REQUEST)
message = error.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
Use more explicit name for error
|
Use more explicit name for error
|
Python
|
bsd-2-clause
|
incuna/django-user-management,incuna/django-user-management
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as e:
raise InvalidExpiredToken
self.assertEqual(e.exception.status_code, HTTP_400_BAD_REQUEST)
message = e.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
Use more explicit name for error
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as error:
raise InvalidExpiredToken
self.assertEqual(error.exception.status_code, HTTP_400_BAD_REQUEST)
message = error.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
<commit_before>from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as e:
raise InvalidExpiredToken
self.assertEqual(e.exception.status_code, HTTP_400_BAD_REQUEST)
message = e.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
<commit_msg>Use more explicit name for error<commit_after>
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as error:
raise InvalidExpiredToken
self.assertEqual(error.exception.status_code, HTTP_400_BAD_REQUEST)
message = error.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as e:
raise InvalidExpiredToken
self.assertEqual(e.exception.status_code, HTTP_400_BAD_REQUEST)
message = e.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
Use more explicit name for errorfrom django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as error:
raise InvalidExpiredToken
self.assertEqual(error.exception.status_code, HTTP_400_BAD_REQUEST)
message = error.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
<commit_before>from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as e:
raise InvalidExpiredToken
self.assertEqual(e.exception.status_code, HTTP_400_BAD_REQUEST)
message = e.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
<commit_msg>Use more explicit name for error<commit_after>from django.test import TestCase
from rest_framework.status import HTTP_400_BAD_REQUEST
from ..exceptions import InvalidExpiredToken
class InvalidExpiredTokenTest(TestCase):
"""Assert `InvalidExpiredToken` behaves as expected."""
def test_raise(self):
"""Assert `InvalidExpiredToken` can be raised."""
with self.assertRaises(InvalidExpiredToken) as error:
raise InvalidExpiredToken
self.assertEqual(error.exception.status_code, HTTP_400_BAD_REQUEST)
message = error.exception.detail.format()
self.assertEqual(message, 'Invalid or expired token.')
|
287dc6f7a7f0321fec8e35d1dc08f07a3b12f63b
|
test/342-winter-sports-pistes.py
|
test/342-winter-sports-pistes.py
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
# Way: 49'er (313466490) http://www.openstreetmap.org/way/313466490
assert_has_feature(
16, 10939, 25061, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'intermediate',
'id': 313466490 })
|
Add piste test to catch dev issue
|
Add piste test to catch dev issue
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
Add piste test to catch dev issue
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
# Way: 49'er (313466490) http://www.openstreetmap.org/way/313466490
assert_has_feature(
16, 10939, 25061, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'intermediate',
'id': 313466490 })
|
<commit_before># http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
<commit_msg>Add piste test to catch dev issue<commit_after>
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
# Way: 49'er (313466490) http://www.openstreetmap.org/way/313466490
assert_has_feature(
16, 10939, 25061, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'intermediate',
'id': 313466490 })
|
# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
Add piste test to catch dev issue# http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
# Way: 49'er (313466490) http://www.openstreetmap.org/way/313466490
assert_has_feature(
16, 10939, 25061, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'intermediate',
'id': 313466490 })
|
<commit_before># http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
<commit_msg>Add piste test to catch dev issue<commit_after># http://www.openstreetmap.org/way/313466665
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'easy',
'id': 313466665 })
# http://www.openstreetmap.org/way/313466720
assert_has_feature(
15, 5467, 12531, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'expert',
'id': 313466720 })
# Way: 49'er (313466490) http://www.openstreetmap.org/way/313466490
assert_has_feature(
16, 10939, 25061, 'roads',
{ 'kind': 'piste',
'piste_type': 'downhill',
'piste_difficulty': 'intermediate',
'id': 313466490 })
|
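The tile coordinates in these assertions are standard slippy-map z/x/y indices. To eyeball where a fixture lives, the usual conversion is the following standalone helper (not part of the test suite; it returns the tile's north-west corner):

import math
def tile_to_lonlat(z, x, y):
    n = 2.0 ** z
    lon = x / n * 360.0 - 180.0
    lat = math.degrees(math.atan(math.sinh(math.pi * (1 - 2 * y / n))))
    return lon, lat
print(tile_to_lonlat(16, 10939, 25061))  # roughly (-119.9, 38.9), the Lake Tahoe area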
2627a89fb56e8fc6ae0a4704333e9ed0012048bd
|
vitrage/tests/functional/datasources/base.py
|
vitrage/tests/functional/datasources/base.py
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertTrue(len(entity_vertices) > 0)
return entity_vertices[0][VProps.ID]
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertGreater(len(entity_vertices), 0)
return entity_vertices[0][VProps.ID]
|
Use assertGreater(len(x), y) instead of assertTrue(len(x) > y)
|
Use assertGreater(len(x), y) instead of assertTrue(len(x) > y)
assertGreater provides a nicer error message if it fails.
Change-Id: Ibfe6a85760a766d310d75bc484e933e96cb0c02f
|
Python
|
apache-2.0
|
openstack/vitrage,openstack/vitrage,openstack/vitrage
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertTrue(len(entity_vertices) > 0)
return entity_vertices[0][VProps.ID]
Use assertGreater(len(x), y) instead of assertTrue(len(x) > y)
assertGreater provides a nicer error message if it fails.
Change-Id: Ibfe6a85760a766d310d75bc484e933e96cb0c02f
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertGreater(len(entity_vertices), 0)
return entity_vertices[0][VProps.ID]
|
<commit_before># Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertTrue(len(entity_vertices) > 0)
return entity_vertices[0][VProps.ID]
<commit_msg>Use assertGreater(len(x), y) instead of assertTrue(len(x) > y)
assertGreater provides a nicer error message if it fails.
Change-Id: Ibfe6a85760a766d310d75bc484e933e96cb0c02f<commit_after>
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertGreater(len(entity_vertices), 0)
return entity_vertices[0][VProps.ID]
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertTrue(len(entity_vertices) > 0)
return entity_vertices[0][VProps.ID]
Use assertGreater(len(x), y) instead of assertTrue(len(x) > y)
assertGreater provides a nicer error message if it fails.
Change-Id: Ibfe6a85760a766d310d75bc484e933e96cb0c02f# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertGreater(len(entity_vertices), 0)
return entity_vertices[0][VProps.ID]
|
<commit_before># Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertTrue(len(entity_vertices) > 0)
return entity_vertices[0][VProps.ID]
<commit_msg>Use assertGreater(len(x), y) instead of assertTrue(len(x) > y)
assertGreater provides a nicer error message if it fails.
Change-Id: Ibfe6a85760a766d310d75bc484e933e96cb0c02f<commit_after># Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.tests.functional.base import TestFunctionalBase
class TestDataSourcesBase(TestFunctionalBase):
def _find_entity_id_by_type(self, graph, type_):
entity_vertices = graph.get_vertices(vertex_attr_filter={
VProps.CATEGORY: EntityCategory.RESOURCE,
VProps.TYPE: type_
})
self.assertGreater(len(entity_vertices), 0)
return entity_vertices[0][VProps.ID]
|
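The motivation stated in the commit message is the failure output. Side by side, with the messages the standard unittest runner produces on failure:

import unittest
class Demo(unittest.TestCase):
    def test_assert_true(self):
        self.assertTrue(len([]) > 0)      # AssertionError: False is not true
    def test_assert_greater(self):
        self.assertGreater(len([]), 0)    # AssertionError: 0 not greater than 0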
3eb796eca4d3dbfd5db9af52166f96cb34654dc8
|
networking_nec/plugins/necnwa/l3/db_api.py
|
networking_nec/plugins/necnwa/l3/db_api.py
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
rt_tid = None
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
|
Move return to inside try
|
Move return to inside try
It is easy to understand if 'return' statement is moved
inside try clause and there is no need to initialize a value.
Follow-up minor fixes for review 276086
Change-Id: I3968e1702c0129ae02517e817da189ca137e7ab4
|
Python
|
apache-2.0
|
openstack/networking-nec,openstack/networking-nec,stackforge/networking-nec,stackforge/networking-nec
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
rt_tid = None
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
Move return to inside try
It is easy to understand if 'return' statement is moved
inside try clause and there is no need to initialize a value.
Follow-up minor fixes for review 276086
Change-Id: I3968e1702c0129ae02517e817da189ca137e7ab4
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
|
<commit_before># Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
rt_tid = None
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
<commit_msg>Move return to inside try
It is easy to understand if 'return' statement is moved
inside try clause and there is no need to initialize a value.
Follow-up minor fixes for review 276086
Change-Id: I3968e1702c0129ae02517e817da189ca137e7ab4<commit_after>
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
|
# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
rt_tid = None
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
Move return to inside try
It is easier to understand if the 'return' statement is moved
inside the try clause, and there is no need to initialize a value.
Follow-up minor fixes for review 276086
Change-Id: I3968e1702c0129ae02517e817da189ca137e7ab4# Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
|
<commit_before># Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
rt_tid = None
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
<commit_msg>Move return to inside try
It is easier to understand if the 'return' statement is moved
inside the try clause, and there is no need to initialize a value.
Follow-up minor fixes for review 276086
Change-Id: I3968e1702c0129ae02517e817da189ca137e7ab4<commit_after># Copyright 2015-2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def get_tenant_id_by_router(session, router_id):
with session.begin(subtransactions=True):
try:
router = session.query(l3_db.Router).filter_by(id=router_id).one()
rt_tid = router.tenant_id
LOG.debug("rt_tid=%s", rt_tid)
return rt_tid
except sa_exc.NoResultFound:
LOG.debug("router not found %s", router_id)
|
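Note on the record above: the commit applies a general Python refactor, returning directly from inside the try clause so that no sentinel variable is needed. A minimal self-contained sketch of the two shapes; the function and dictionary names here are hypothetical, not taken from the neutron code.
def lookup_before(rows, key):
    # Old shape: initialize a sentinel, assign inside the try, return at the end.
    value = None
    try:
        value = rows[key]  # may raise KeyError
    except KeyError:
        pass
    return value
def lookup_after(rows, key):
    # New shape: return directly from the try clause; no sentinel needed.
    try:
        return rows[key]
    except KeyError:
        return None  # explicit "not found" result
assert lookup_before({"a": 1}, "a") == lookup_after({"a": 1}, "a") == 1
assert lookup_before({}, "a") is lookup_after({}, "a") is None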
40653d829efcc0461d0da9472111aa89b41e08f1
|
hasjob/views/login.py
|
hasjob/views/login.py
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
Support for Lastuser push notifications.
|
Support for Lastuser push notifications.
|
Python
|
agpl-3.0
|
qitianchan/hasjob,qitianchan/hasjob,hasgeek/hasjob,hasgeek/hasjob,nhannv/hasjob,hasgeek/hasjob,sindhus/hasjob,sindhus/hasjob,nhannv/hasjob,sindhus/hasjob,qitianchan/hasjob,sindhus/hasjob,qitianchan/hasjob,ashwin01/hasjob,ashwin01/hasjob,hasgeek/hasjob,ashwin01/hasjob,ashwin01/hasjob,nhannv/hasjob,nhannv/hasjob,qitianchan/hasjob,ashwin01/hasjob,nhannv/hasjob,sindhus/hasjob
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
Support for Lastuser push notifications.
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
<commit_before># -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
<commit_msg>Support for Lastuser push notifications.<commit_after>
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
Support for Lastuser push notifications.# -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
<commit_before># -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
<commit_msg>Support for Lastuser push notifications.<commit_after># -*- coding: utf-8 -*-
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
a05403c2cfe99926b650024e8de08942eda837c6
|
indexdigest/linters/linter_0034_missing_primary_index.py
|
indexdigest/linters/linter_0034_missing_primary_index.py
|
"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = list(filter(
lambda index: index.is_primary or index.is_unique,
database.get_table_indices(table)
))
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
|
"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = [
index for index in database.get_table_indices(table)
if index.is_primary or index.is_unique
]
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
|
Use list comprehension as pylint suggests
|
Use list comprehension as pylint suggests
|
Python
|
mit
|
macbre/index-digest,macbre/index-digest
|
"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = list(filter(
lambda index: index.is_primary or index.is_unique,
database.get_table_indices(table)
))
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
Use list comprehension as pylint suggests
|
"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = [
index for index in database.get_table_indices(table)
if index.is_primary or index.is_unique
]
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
|
<commit_before>"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = list(filter(
lambda index: index.is_primary or index.is_unique,
database.get_table_indices(table)
))
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
<commit_msg>Use list comprehension as pylint suggests<commit_after>
|
"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = [
index for index in database.get_table_indices(table)
if index.is_primary or index.is_unique
]
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
|
"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = list(filter(
lambda index: index.is_primary or index.is_unique,
database.get_table_indices(table)
))
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
Use list comprehension as pylint suggests"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = [
index for index in database.get_table_indices(table)
if index.is_primary or index.is_unique
]
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
|
<commit_before>"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = list(filter(
lambda index: index.is_primary or index.is_unique,
database.get_table_indices(table)
))
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
<commit_msg>Use list comprehension as pylint suggests<commit_after>"""
This linter reports missing primary / unique index
"""
from collections import OrderedDict
from indexdigest.utils import LinterEntry
def check_missing_primary_index(database):
"""
:type database indexdigest.database.Database
:rtype: list[LinterEntry]
"""
for table in database.get_tables():
# list non-primary (and non-unique) indices only
# @see https://bugs.mysql.com/bug.php?id=76252
# @see https://github.com/Wikia/app/pull/9863
indices = [
index for index in database.get_table_indices(table)
if index.is_primary or index.is_unique
]
if indices:
# so we have at least one primary or unique index defined
continue
context = OrderedDict()
context['schema'] = database.get_table_schema(table)
yield LinterEntry(linter_type='missing_primary_index', table_name=table,
message='"{}" table does not have any primary or unique index'.
format(table),
context=context)
|
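Note on the record above: filter() with a lambda and a list comprehension build the same list, but the comprehension avoids one function call per element, which is what the pylint suggestion targets. A runnable sketch using a namedtuple as a stand-in for indexdigest's index objects (the stand-in is assumed, not the library's actual class):
from collections import namedtuple
Index = namedtuple("Index", ["name", "is_primary", "is_unique"])
indices = [
    Index("PRIMARY", True, False),
    Index("idx_user_email", False, True),
    Index("idx_created_at", False, False),
]
# filter() plus a lambda, as in the old code:
kept_old = list(filter(lambda i: i.is_primary or i.is_unique, indices))
# Equivalent list comprehension, as in the new code:
kept_new = [i for i in indices if i.is_primary or i.is_unique]
assert kept_old == kept_new  # the same two indices survive in both cases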
dce404d65f1f2b8f297cfa066210b885621d38d0
|
graphene/commands/exit_command.py
|
graphene/commands/exit_command.py
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
def execute(self, storage_manager, timer=None):
# This should never be used anyway.
pass
|
Fix EXIT command to have execute method for abstract class
|
Fix EXIT command to have execute method for abstract class
|
Python
|
apache-2.0
|
PHB-CS123/graphene,PHB-CS123/graphene,PHB-CS123/graphene
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
Fix EXIT command to have execute method for abstract class
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
def execute(self, storage_manager, timer=None):
# This should never be used anyway.
pass
|
<commit_before>from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
<commit_msg>Fix EXIT command to have execute method for abstract class<commit_after>
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
def execute(self, storage_manager, timer=None):
# This should never be used anyway.
pass
|
from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
Fix EXIT command to have execute method for abstract classfrom graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
def execute(self, storage_manager, timer=None):
# This should never be used anyway.
pass
|
<commit_before>from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
<commit_msg>Fix EXIT command to have execute method for abstract class<commit_after>from graphene.commands.command import Command
class ExitCommand(Command):
def __init__(self):
pass
def execute(self, storage_manager, timer=None):
# This should never be used anyway.
pass
|
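Note on the record above: a stub execute() is required because a subclass of an abstract base class cannot be instantiated until every abstract method is implemented. graphene's actual Command base is not shown in this record, so the sketch below recreates the mechanism with Python's abc module; the method signature is assumed from the diff.
import abc
class Command(abc.ABC):
    @abc.abstractmethod
    def execute(self, storage_manager, timer=None):
        """Run the command against the given storage manager."""
class ExitCommand(Command):
    def execute(self, storage_manager, timer=None):
        # Never actually called; defined only so instantiation succeeds.
        pass
ExitCommand()  # OK. Without execute() defined, this line raises TypeError.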
baea090319eafa6c6cfac397c6b0be4d7ca34342
|
rplugin/python3/deoplete/sources/LanguageClientSource.py
|
rplugin/python3/deoplete/sources/LanguageClientSource.py
|
from .base import Base
CompleteOutputs = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(CompleteOutputs)
if len(outputs) != 0:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(CompleteOutputs))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
# f = open("/tmp/deoplete.log", "w")
# def log(message):
# f.writelines([message])
# f.flush()
|
from .base import Base
COMPLETE_OUTPUTS = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(COMPLETE_OUTPUTS)
if outputs:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(COMPLETE_OUTPUTS))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
|
Fix deoplete variable naming and conditional logic
|
Fix deoplete variable naming and conditional logic
* Module-level variables should be CAPITALIZED.
* if len(my_list) != 0 can be more-safely changed to "if my_list"
* An empty list is falsey, a non-empty list is truthy. We're also safe
from unexpected "None" values now.
* Cleans up unnecessary comments that somehow made their way into
the VCS at the bottom
|
Python
|
mit
|
autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim
|
from .base import Base
CompleteOutputs = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(CompleteOutputs)
if len(outputs) != 0:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(CompleteOutputs))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
# f = open("/tmp/deoplete.log", "w")
# def log(message):
# f.writelines([message])
# f.flush()
Fix deoplete variable naming and conditional logic
* Module-level variables should be CAPITALIZED.
* if len(my_list) != 0 can be more-safely changed to "if my_list"
* An empty list is falsey, a non-empty list is truthy. We're also safe
from unexpected "None" values now.
* Cleans up unnecessary comments that somehow made their way into
the VCS at the bottom
|
from .base import Base
COMPLETE_OUTPUTS = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(COMPLETE_OUTPUTS)
if outputs:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(COMPLETE_OUTPUTS))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
|
<commit_before>from .base import Base
CompleteOutputs = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(CompleteOutputs)
if len(outputs) != 0:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(CompleteOutputs))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
# f = open("/tmp/deoplete.log", "w")
# def log(message):
# f.writelines([message])
# f.flush()
<commit_msg>Fix deoplete variable naming and conditional logic
* Module-level variables should be CAPITALIZED.
* if len(my_list) != 0 can be more-safely changed to "if my_list"
* An empty list is falsey, a non-empty list is truthy. We're also safe
from unexpected "None" values now.
* Cleans up unnecessary comments that somehow made their way into
the VCS at the bottom<commit_after>
|
from .base import Base
COMPLETE_OUTPUTS = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(COMPLETE_OUTPUTS)
if outputs:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(COMPLETE_OUTPUTS))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
|
from .base import Base
CompleteOutputs = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(CompleteOutputs)
if len(outputs) != 0:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(CompleteOutputs))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
# f = open("/tmp/deoplete.log", "w")
# def log(message):
# f.writelines([message])
# f.flush()
Fix deoplete variable naming and conditional logic
* Module-level variables should be CAPITALIZED.
* if len(my_list) != 0 can be more-safely changed to "if my_list"
* An empty list is falsey, a non-empty list is truthy. We're also safe
from unexpected "None" values now.
* Cleans up unnecessary comments that somehow made their way into
the VCS at the bottomfrom .base import Base
COMPLETE_OUTPUTS = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(COMPLETE_OUTPUTS)
if outputs:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(COMPLETE_OUTPUTS))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
|
<commit_before>from .base import Base
CompleteOutputs = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(CompleteOutputs)
if len(outputs) != 0:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(CompleteOutputs))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
# f = open("/tmp/deoplete.log", "w")
# def log(message):
# f.writelines([message])
# f.flush()
<commit_msg>Fix deoplete variable naming and conditional logic
* Module-level variables should be CAPITALIZED.
* if len(my_list) != 0 can be more-safely changed to "if my_list"
* An empty list is falsey, a non-empty list is truthy. We're also safe
from unexpected "None" values now.
* Cleans up unnecessary comments that somehow made their way into
the VCS at the bottom<commit_after>from .base import Base
COMPLETE_OUTPUTS = "g:LanguageClient_omniCompleteResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.min_pattern_length = 1
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.input_pattern = r'(\.|::|->)\w*$'
def gather_candidates(self, context):
if context["is_async"]:
outputs = self.vim.eval(COMPLETE_OUTPUTS)
if outputs:
context["is_async"] = False
# TODO: error handling.
candidates = outputs[0].get("result", [])
# log(str(candidates))
return candidates
else:
context["is_async"] = True
self.vim.command("let {} = []".format(COMPLETE_OUTPUTS))
character = (context["complete_position"]
+ len(context["complete_str"]))
self.vim.funcs.LanguageClient_omniComplete({
"character": character,
"complete_position": context["complete_position"],
})
return []
|
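Note on the record above: the truthiness check also guards against None, which the length comparison does not; len(None) raises a TypeError, while "if outputs" treats None and an empty list alike. A small sketch with hypothetical sample values:
def first_result(outputs):
    # Truthiness check: handles [] and also None without raising.
    if outputs:
        return outputs[0].get("result", [])
    return []
assert first_result([{"result": [1, 2]}]) == [1, 2]
assert first_result([]) == []
assert first_result(None) == []  # len(None) here would raise TypeError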
0850a64ac3758b935a99730733a31710e5178ee7
|
readthedocs/settings/postgres.py
|
readthedocs/settings/postgres.py
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
#HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
|
Remove this haystack setting as well.
|
Remove this haystack setting as well.
|
Python
|
mit
|
wijerasa/readthedocs.org,GovReady/readthedocs.org,kenwang76/readthedocs.org,jerel/readthedocs.org,sid-kap/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,attakei/readthedocs-oauth,tddv/readthedocs.org,techtonik/readthedocs.org,wijerasa/readthedocs.org,royalwang/readthedocs.org,raven47git/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,attakei/readthedocs-oauth,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,raven47git/readthedocs.org,raven47git/readthedocs.org,ojii/readthedocs.org,clarkperkins/readthedocs.org,soulshake/readthedocs.org,stevepiercy/readthedocs.org,SteveViss/readthedocs.org,nyergler/pythonslides,mhils/readthedocs.org,jerel/readthedocs.org,johncosta/private-readthedocs.org,davidfischer/readthedocs.org,techtonik/readthedocs.org,takluyver/readthedocs.org,GovReady/readthedocs.org,johncosta/private-readthedocs.org,kenshinthebattosai/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,takluyver/readthedocs.org,hach-que/readthedocs.org,laplaceliu/readthedocs.org,davidfischer/readthedocs.org,royalwang/readthedocs.org,LukasBoersma/readthedocs.org,nikolas/readthedocs.org,d0ugal/readthedocs.org,cgourlay/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,GovReady/readthedocs.org,d0ugal/readthedocs.org,royalwang/readthedocs.org,titiushko/readthedocs.org,laplaceliu/readthedocs.org,VishvajitP/readthedocs.org,soulshake/readthedocs.org,asampat3090/readthedocs.org,singingwolfboy/readthedocs.org,nikolas/readthedocs.org,fujita-shintaro/readthedocs.org,mrshoki/readthedocs.org,hach-que/readthedocs.org,kdkeyser/readthedocs.org,LukasBoersma/readthedocs.org,stevepiercy/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,safwanrahman/readthedocs.org,emawind84/readthedocs.org,kdkeyser/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,dirn/readthedocs.org,kdkeyser/readthedocs.org,dirn/readthedocs.org,wanghaven/readthedocs.org,rtfd/readthedocs.org,Tazer/readthedocs.org,hach-que/readthedocs.org,sils1297/readthedocs.org,davidfischer/readthedocs.org,Tazer/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,sils1297/readthedocs.org,stevepiercy/readthedocs.org,sunnyzwh/readthedocs.org,pombredanne/readthedocs.org,davidfischer/readthedocs.org,Tazer/readthedocs.org,singingwolfboy/readthedocs.org,sunnyzwh/readthedocs.org,nyergler/pythonslides,mhils/readthedocs.org,laplaceliu/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,sunnyzwh/readthedocs.org,Carreau/readthedocs.org,jerel/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,clarkperkins/readthedocs.org,soulshake/readthedocs.org,Carreau/readthedocs.org,asampat3090/readthedocs.org,safwanrahman/readthedocs.org,wanghaven/readthedocs.org,tddv/readthedocs.org,kenshinthebattosai/readthedocs.org,ojii/readthedocs.org,Carreau/readthedocs.org,agjohnson/readthedocs.org,istresearch/readthedocs.org,wijerasa/readthedocs.org,Tazer/readthedocs.org,mrshoki/readthedocs.org,clarkperkins/readthedocs.org,CedarLogic/readthedocs.org,mrshoki/readthedocs.org,ojii/readthedocs.org,Carreau/readthedocs.org,agjohnson/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,nyergler/pythonslides,soulshake/readthedocs.org,cgourlay/readthedocs.org,gjtorikian/readthedocs.org,asampat3090/readthedocs.org,royalwang/readthedocs.org,jerel/readthedocs.org,gjtorikian/readthedocs.org,istresearch/readthedocs.org,kenwang76/readthedocs.org,fujita-shintaro/readthedocs.org,KamranMackey/readthedocs.org,SteveViss/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,kenshinthebattosai/readthedocs.org,emawind84/readthedocs.org,espdev/readthedocs.org,sils1297/readthedocs.org,sid-kap/readthedocs.org,michaelmcandrew/readthedocs.org,atsuyim/readthedocs.org,rtfd/readthedocs.org,mhils/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,nikolas/readthedocs.org,sid-kap/readthedocs.org,kenshinthebattosai/readthedocs.org,nikolas/readthedocs.org,KamranMackey/readthedocs.org,LukasBoersma/readthedocs.org,johncosta/private-readthedocs.org,techtonik/readthedocs.org,titiushko/readthedocs.org,espdev/readthedocs.org,laplaceliu/readthedocs.org,sunnyzwh/readthedocs.org,gjtorikian/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,istresearch/readthedocs.org,atsuyim/readthedocs.org,takluyver/readthedocs.org,raven47git/readthedocs.org,nyergler/pythonslides,d0ugal/readthedocs.org,agjohnson/readthedocs.org,stevepiercy/readthedocs.org
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
Remove this haystack setting as well.
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
#HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
|
<commit_before>from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
<commit_msg>Remove this haystack setting as well.<commit_after>
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
#HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
|
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
Remove this haystack setting as well.from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
#HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
|
<commit_before>from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
<commit_msg>Remove this haystack setting as well.<commit_after>from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': '10.177.73.97',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
#HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
SLUMBER_API_HOST = 'http://readthedocs.org'
try:
from local_settings import *
except:
pass
|
c7578896036bc07bb1edc2d79f699968c25ca89e
|
bika/lims/upgrade/to1117.py
|
bika/lims/upgrade/to1117.py
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
Upgrade 1117 - add run_dependencies=False
|
Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step, causes
a beforeDelete handler to fail a HoldingReference
check.
|
Python
|
agpl-3.0
|
labsanmartin/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,rockfruit/bika.lims,rockfruit/bika.lims,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,labsanmartin/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step, causes
a beforeDelete handler to fail a HoldingReference
check.
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
<commit_before>from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
<commit_msg>Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step, causes
a beforeDelete handler to fail a HoldingReference
check.<commit_after>
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step, causes
a beforeDelete handler to fail a HoldingReference
check.from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
<commit_before>from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets')
<commit_msg>Upgrade 1117 - add run_dependencies=False
Somehow re-importing the 'portlets' step causes
a beforeDelete handler to fail a HoldingReference
check.<commit_after>from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def upgrade(tool):
""" Enable portlets for key=/ (re-import portlets.xml): issue #695
"""
portal = aq_parent(aq_inner(tool))
setup = portal.portal_setup
setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets',
run_dependencies=False)
|
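Why run_dependencies=False helps: runImportStepFromProfile re-runs the named step plus every step it declares as a dependency by default, and it is that dependency replay which re-fires the beforeDelete handler and trips the HoldingReference check. A minimal sketch of the two call forms (hypothetical profile id; setup is a portal_setup tool):
# default form: re-runs 'portlets' and all of its declared dependency steps
setup.runImportStepFromProfile('profile-my.package:default', 'portlets')
# this upgrade's form: re-runs only the 'portlets' step itself
setup.runImportStepFromProfile('profile-my.package:default', 'portlets',
                               run_dependencies=False)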
7cde5e713ace2b0a1d9cdef01ac912f3a53814cd
|
run_scripts/build_phylogenies.py
|
run_scripts/build_phylogenies.py
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unnecessary way to provide a pickle-serialisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
Change run script to use worker pool
|
Change run script to use worker pool
|
Python
|
bsd-3-clause
|
fmaguire/dendrogenous
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
Change run script to use worker pool
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unnecessary way to provide a pickle-serialisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
<commit_before>#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
<commit_msg>Change run script to use worker pool<commit_after>
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unnecessary way to provide a pickle-serialisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
Change run script to use worker pool
#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unnecessary way to provide a pickle-serialisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
<commit_before>#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
processes = [multiprocessing.Process(target=build_phylogeny, args=(seq, settings)) for seq in seqs_needing_run]
for p in processes:
p.start()
for p in processes:
p.join()
def build_phylogeny(seq, settings):
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
<commit_msg>Change run script to use worker pool<commit_after>#!/usr/bin/env python
# Automatically generate phylogenies from a settings file
# specifying input fasta and genomes
import sys
import dendrogenous as dg
import dendrogenous.settings
import dendrogenous.utils
import dendrogenous.core
import joblib
import pickle
#multiprocessing
def main(settings_file):
settings = dg.settings.Settings(settings_file)
input_seqs = dg.utils.parse_seqs(settings.input_seqs)
seqs_needing_run = dg.utils.check_already_run(settings, input_seqs)
r = joblib.Parallel(n_jobs=24, verbose=5)(joblib.delayed(pool_process)\
(seq, settings_file) for seq in seqs_needing_run)
def pool_process(seq, settings_file):
"""
A hacky and unnecessary way to provide a pickle-serialisable
object for multiprocessing to pass off to workers
- inefficiency in reinstantiating a settings class every time
"""
settings = dg.settings.Settings(settings_file)
seq_job = dg.core.Dendrogenous(seq, settings)
seq_job.build_named_phylogeny()
if __name__=='__main__':
if len(sys.argv) != 2:
print("USAGE: build_phylogenies.py settings.json")
sys.exit(1)
main(sys.argv[1])
|
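The change swaps one multiprocessing.Process per sequence -- which launches every job at once -- for a joblib pool capped at n_jobs workers. The same bounded-pool shape in isolation (hypothetical work function; the cap of 4 is arbitrary):
import joblib
def work(item):
    # stand-in for the per-sequence job; must be picklable, hence module level
    return item * item
results = joblib.Parallel(n_jobs=4, verbose=5)(
    joblib.delayed(work)(item) for item in range(100))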
a11c839988b71e9f769cb5ba856474205b7aeefb
|
jsonschema/tests/fuzz_validate.py
|
jsonschema/tests/fuzz_validate.py
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
Fix fuzzer to include instrumentation
|
Fix fuzzer to include instrumentation
|
Python
|
mit
|
python-jsonschema/jsonschema
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
Fix fuzzer to include instrumentation
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
<commit_before>"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
<commit_msg>Fix fuzzer to include instrumentation<commit_after>
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
Fix fuzzer to include instrumentation
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
<commit_before>"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
<commit_msg>Fix fuzzer to include instrumentation<commit_after>"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.instrument_all()
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
|
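The one-line fix matters because atheris only collects coverage from code that was instrumented before fuzzing starts: calling atheris.instrument_all() after the target modules are imported and before atheris.Fuzz() rewrites their loaded bytecode, whereas without it the fuzzer mutates inputs blind. The required ordering in a minimal sketch (hypothetical one_input target):
import sys
import atheris
def one_input(data: bytes):
    pass  # fuzz target goes here
atheris.instrument_all()            # instrument already-imported modules first
atheris.Setup(sys.argv, one_input)
atheris.Fuzz()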
da843b10a92cd4f2f95f18f78f9ea03dcd9a67d5
|
packages/Python/lldbsuite/support/seven.py
|
packages/Python/lldbsuite/support/seven.py
|
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True))
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
|
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True).rstrip())
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
|
Remove trailing characters from command output.
|
[testsuite] Remove trailing characters from command output.
When running the test suite on macOS with Python 3 we noticed a
difference in behavior between Python 2 and Python 3 for
seven.get_command_output. The output contained a newline with Python 3,
but not for Python 2. This resulted in an invalid SDK path passed to the
compiler.
Differential revision: https://reviews.llvm.org/D57275
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352397 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb
|
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True))
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
[testsuite] Remove trailing characters from command output.
When running the test suite on macOS with Python 3 we noticed a
difference in behavior between Python 2 and Python 3 for
seven.get_command_output. The output contained a newline with Python 3,
but not for Python 2. This resulted in an invalid SDK path passed to the
compiler.
Differential revision: https://reviews.llvm.org/D57275
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352397 91177308-0d34-0410-b5e6-96231b3b80d8
|
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True).rstrip())
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
|
<commit_before>import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True))
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
<commit_msg>[testsuite] Remove trailing characters from command output.
When running the test suite on macOS with Python 3 we noticed a
difference in behavior between Python 2 and Python 3 for
seven.get_command_output. The output contained a newline with Python 3,
but not for Python 2. This resulted in an invalid SDK path passed to the
compiler.
Differential revision: https://reviews.llvm.org/D57275
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352397 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True).rstrip())
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
|
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True))
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
[testsuite] Remove trailing characters from command output.
When running the test suite on macOS with Python 3 we noticed a
difference in behavior between Python 2 and Python 3 for
seven.get_command_output. The output contained a newline with Python 3,
but not for Python 2. This resulted in an invalid SDK path passed to the
compiler.
Differential revision: https://reviews.llvm.org/D57275
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352397 91177308-0d34-0410-b5e6-96231b3b80d8
import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True).rstrip())
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
|
<commit_before>import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True))
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
<commit_msg>[testsuite] Remove trailing characters from command output.
When running the test suite on macOS with Python 3 we noticed a
difference in behavior between Python 2 and Python 3 for
seven.get_command_output. The output contained a newline with Python 3,
but not for Python 2. This resulted in an invalid SDK path passed to the
compiler.
Differential revision: https://reviews.llvm.org/D57275
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@352397 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>import six
if six.PY2:
import commands
get_command_output = commands.getoutput
get_command_status_output = commands.getstatusoutput
cmp_ = cmp
else:
def get_command_status_output(command):
try:
import subprocess
return (
0,
subprocess.check_output(
command,
shell=True,
universal_newlines=True).rstrip())
except subprocess.CalledProcessError as e:
return (e.returncode, e.output)
def get_command_output(command):
return get_command_status_output(command)[1]
cmp_ = lambda x, y: (x > y) - (x < y)
|
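The Python 2/3 difference behind this fix is easy to reproduce: commands.getoutput() strips the trailing newline, while subprocess.check_output() preserves it, so the Python 3 branch needs the explicit rstrip() to match. A Python 3 sketch (assuming a POSIX shell with echo):
import subprocess
raw = subprocess.check_output("echo hello", shell=True, universal_newlines=True)
print(repr(raw))            # 'hello\n' -- newline preserved by check_output
print(repr(raw.rstrip()))   # 'hello'   -- matches Python 2's commands.getoutput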
224d9f4e243f6645e88b32ad7342a55128f19eeb
|
html5lib/__init__.py
|
html5lib/__init__.py
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage::
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
Fix formatting of docstring example
|
Fix formatting of docstring example
It runs together in the built HTML.
|
Python
|
mit
|
html5lib/html5lib-python,html5lib/html5lib-python,html5lib/html5lib-python
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
Fix formatting of docstring example
It runs together in the built HTML.
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage::
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
<commit_before>"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
<commit_msg>Fix formatting of docstring example
It runs together in the built HTML.<commit_after>
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage::
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
Fix formatting of docstring example
It runs together in the built HTML.
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage::
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
<commit_before>"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
<commit_msg>Fix formatting of docstring example
It runs together in the built HTML.<commit_after>"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage::
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "0.9999999999-dev"
|
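The doubled colon is reStructuredText syntax: a paragraph ending in '::' turns the following indented lines into a literal block, while a single ':' leaves them as ordinary flowing text, which is why the example "runs together" in the built HTML. A sketch of the convention inside a docstring (the indentation under '::' is what marks the literal block):
def parse_example():
    """Example usage::

        import html5lib
        tree = html5lib.parse(open("my_document.html"))
    """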
f748facb9edd35ca6c61be336cad3109cafbbc89
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
from flask import json
from api import create_app, db
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app(config_name='TestingEnv')
self.client = self.app.test_client()
# Binds the app to current context
with self.app.app_context():
# Create all tables
db.create_all()
def tearDown(self):
# Drop all tables
with self.app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
def test_something(self):
self.assertTrue(1)
if __name__ == '__main__':
unittest.main()
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
Add test for index route
|
Add test for index route
|
Python
|
mit
|
patlub/BucketListAPI,patlub/BucketListAPI
|
import unittest
from flask import json
from api import create_app, db
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app(config_name='TestingEnv')
self.client = self.app.test_client()
# Binds the app to current context
with self.app.app_context():
# Create all tables
db.create_all()
def tearDown(self):
# Drop all tables
with self.app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
def test_something(self):
self.assertTrue(1)
if __name__ == '__main__':
unittest.main()
Add test for index route
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from flask import json
from api import create_app, db
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app(config_name='TestingEnv')
self.client = self.app.test_client()
# Binds the app to current context
with self.app.app_context():
# Create all tables
db.create_all()
def tearDown(self):
# Drop all tables
with self.app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
def test_something(self):
self.assertTrue(1)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for index route<commit_after>
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
import unittest
from flask import json
from api import create_app, db
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app(config_name='TestingEnv')
self.client = self.app.test_client()
# Binds the app to current context
with self.app.app_context():
# Create all tables
db.create_all()
def tearDown(self):
# Drop all tables
with self.app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
def test_something(self):
self.assertTrue(1)
if __name__ == '__main__':
unittest.main()
Add test for index route
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from flask import json
from api import create_app, db
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app(config_name='TestingEnv')
self.client = self.app.test_client()
# Binds the app to current context
with self.app.app_context():
# Create all tables
db.create_all()
def tearDown(self):
# Drop all tables
with self.app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
def test_something(self):
self.assertTrue(1)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for index route<commit_after>import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
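The test exercises the route through Flask's test client and checks both the status code and the body; response.data is bytes, hence the decode() before the substring assertion. The same pattern in isolation (hypothetical minimal app; the 201 mirrors this project's index route, though 200 is the more conventional status for an index):
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
    return 'Welcome Message', 201
client = app.test_client()
response = client.get('/')
assert response.status_code == 201
assert 'Welcome Message' in response.data.decode()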
1bc9bf9b3bab521c4a144e00d24477afae0eb0df
|
examples/basic_datalogger.py
|
examples/basic_datalogger.py
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.117')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.104')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.decimation_rate = 5e6 # 100Hz
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
|
Fix the datalogger example script to use roll mode and a high decimation
|
Fix the datalogger example script to use roll mode and a high decimation
|
Python
|
mit
|
liquidinstruments/pymoku,benizl/pymoku
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.117')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
Fix the datalogger example script to use roll mode and a high decimation
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.104')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.decimation_rate = 5e6 # 100Hz
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
|
<commit_before>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.117')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
<commit_msg>Fix the datalogger example script to use roll mode and a high decimation<commit_after>
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.104')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.decimation_rate = 5e6 # 100Hz
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
|
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.117')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
Fix the datalogger example script to use roll mode and a high decimation
from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.104')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.decimation_rate = 5e6 # 100Hz
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
|
<commit_before>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.117')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
<commit_msg>Fix the datalogger example script to use roll mode and a high decimation<commit_after>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.104')
i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
print "No or wrong instrument deployed"
i = Oscilloscope()
m.attach_instrument(i)
else:
print "Attached to existing Oscilloscope"
i.set_defaults()
i.decimation_rate = 5e6 # 100Hz
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_start(1)
while True:
time.sleep(1)
s = i.datalogger_status()
b = i.datalogger_transferred()
print "Status %d (%d samples)" % (s, b)
# TODO: Symbolic constants
if s == 0 or s == 7:
break
i.datalogger_stop()
m.close()
|
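The '# 100Hz' comment is the decimation arithmetic: effective rate = base sample rate / decimation_rate, and at roughly 100 samples per second roll mode (OSC_ROLL) is the x-axis mode that suits a long-running log. A sketch of the arithmetic (the 500 MSa/s base rate is an assumption about the Moku:Lab front end, inferred from the comment rather than stated in this record):
BASE_RATE = 500e6                 # samples/s -- assumed Moku:Lab ADC rate
decimation = 5e6
print(BASE_RATE / decimation)     # 100.0, matching the '# 100Hz' comment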
644896c856b1e6ad20a3790234439b8ac8403917
|
examples/dft/12-camb3lyp.py
|
examples/dft/12-camb3lyp.py
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
Update the camb3lyp example to libxc 5 series
|
Update the camb3lyp example to libxc 5 series
|
Python
|
apache-2.0
|
sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
Update the camb3lyp example to libxc 5 series
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
<commit_before>#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
<commit_msg>Update the camb3lyp example to libxc 5 series<commit_after>
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
Update the camb3lyp example to libxc 5 series#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
<commit_before>#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
<commit_msg>Update the camb3lyp example to libxc 5 series<commit_after>#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
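Both backends expose the same interface to the numerical integrator, which is why a single attribute assignment is enough to switch libraries mid-session. A minimal sketch of inspecting and swapping the backend (the printed module paths are what a standard pyscf build with both libraries is expected to report):

from pyscf import gto, dft

mol = gto.M(atom='H 0 0 0; H 0 0 0.74', basis='sto-3g')
mf = dft.RKS(mol)
mf.xc = 'b3lyp'
mf.kernel()                          # SCF with the default libxc backend
print(mf._numint.libxc.__name__)     # expected: pyscf.dft.libxc
mf._numint.libxc = dft.xcfun         # the on-the-fly switch from the example
mf.kernel()                          # same functional, now evaluated by xcfun
print(mf._numint.libxc.__name__)     # expected: pyscf.dft.xcfun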
|
bf007267246bd317dc3ccad9f5cf8a9f452b3e0b
|
firecares/utils/__init__.py
|
firecares/utils/__init__.py
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
Remove the unused convert_png_to_jpg method.
|
Remove the unused convert_png_to_jpg method.
|
Python
|
mit
|
FireCARES/firecares,FireCARES/firecares,meilinger/firecares,meilinger/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares,HunterConnelly/firecares,FireCARES/firecares,HunterConnelly/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
Remove the unused convert_png_to_jpg method.
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
<commit_before>from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
<commit_msg>Remove the unused convert_png_to_jpg method.<commit_after>
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
Remove the unused convert_png_to_jpg method.from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
|
<commit_before>from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
<commit_msg>Remove the unused convert_png_to_jpg method.<commit_after>from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
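dictfetchall relies only on the DB-API cursor contract (description plus fetchall), so it works unchanged outside Django; a self-contained illustration with the standard-library sqlite3 driver:

import sqlite3

def dictfetchall(cursor):
    desc = cursor.description
    return [
        dict(zip([col[0] for col in desc], row))
        for row in cursor.fetchall()
    ]

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE station (name TEXT, engines INTEGER)')
conn.execute("INSERT INTO station VALUES ('Station 1', 2)")
rows = dictfetchall(conn.execute('SELECT name, engines FROM station'))
print(rows)  # [{'name': 'Station 1', 'engines': 2}]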
|
49f5802a02a550cc8cee3be417426a83c31de5c9
|
Source/Git/Experiments/git_log.py
|
Source/Git/Experiments/git_log.py
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
Exit the loop early when experimenting.
|
Exit the loop early when experimenting.
|
Python
|
apache-2.0
|
barry-scott/scm-workbench,barry-scott/scm-workbench,barry-scott/scm-workbench
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
Exit the loop early when experimenting.
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
<commit_before>#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
<commit_msg>Exit the loop early when experimenting.<commit_after>
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
Exit the loop early when experimenting.#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
|
<commit_before>#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
<commit_msg>Exit the loop early when experimenting.<commit_after>#!/usr/bin/python3
import sys
import git
r = git.Repo( sys.argv[1] )
def printTree( tree, indent=0 ):
prefix = ' '*indent
print( prefix, '-' * 16 )
print( prefix, 'Tree path %s' % (tree.path,) )
for blob in tree:
print( prefix, '%s %s (%s)' % (blob.type, blob.path, blob.hexsha) )
for child in tree.trees:
printTree( child, indent+4 )
for index, commit in enumerate(r.iter_commits( None )):
print( '=' * 60 )
for name in sorted( dir( commit ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
print( 'Commit: %s: %r' % (name, getattr( commit, name )) )
print( '-' * 60 )
stats = commit.stats
for name in sorted( dir( stats ) ):
if name[0] not in 'abcdefghijklmnopqrstuvwxyz':
continue
if name == 'files':
for file in stats.files:
print( 'Commit.Stats.files: %s: %r' % (file, stats.files[file]) )
else:
print( 'Commit.Stats: %s: %r' % (name, getattr( stats, name )) )
print( '-' * 60 )
tree = commit.tree
printTree( tree )
if index > 1:
break
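The index check is one way to cap the experiment at three commits; itertools.islice expresses the same limit without hand-tracking the counter, e.g. for commit in islice(r.iter_commits(None), 3). A runnable sketch with a generator standing in for the commit iterator:

from itertools import islice

def fake_commits():
    n = 0
    while True:            # endless, like iterating a long history
        yield 'commit-%d' % n
        n += 1

for commit in islice(fake_commits(), 3):
    print(commit)          # commit-0, commit-1, commit-2, then stops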
|
4e6ec9cc5b052341094723433f58a21020fa82f0
|
tools/scheduler/scheduler/core.py
|
tools/scheduler/scheduler/core.py
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles):
self.roles = roles
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles, binary_url):
self.roles = roles
self.binary_url = binary_url
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
Add binary_url member to Job.
|
Add binary_url member to Job.
|
Python
|
apache-2.0
|
DaMSL/K3,DaMSL/K3,yliu120/K3
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles):
self.roles = roles
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
Add binary_url member to Job.
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles, binary_url):
self.roles = roles
self.binary_url = binary_url
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
<commit_before># scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles):
self.roles = roles
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
<commit_msg>Add binary_url member to Job.<commit_after>
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles, binary_url):
self.roles = roles
self.binary_url = binary_url
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles):
self.roles = roles
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
Add binary_url member to Job.# scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles, binary_url):
self.roles = roles
self.binary_url = binary_url
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
|
<commit_before># scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles):
self.roles = roles
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
<commit_msg>Add binary_url member to Job.<commit_after># scheduler.core: Data structures for managing K3 jobs.
class Job:
def __init__(self, roles, binary_url):
self.roles = roles
self.binary_url = binary_url
self.tasks = None
self.status = None
class Role:
    def __init__(self, peers = 0, variables = {}, inputs = {}, hostmask = r"*"):
        self.peers = peers
        self.variables = variables
self.inputs = inputs
self.hostmask = hostmask
class Task:
def __init__(self):
self.status = None
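One caveat with the Role defaults above: a literal {} in a def line is built once at function definition, so every call that omits the argument shares the same dict. A minimal demonstration and the usual None-sentinel fix:

def shared_default(variables={}):
    variables['x'] = 1
    return variables

def fresh_default(variables=None):
    variables = {} if variables is None else variables
    variables['x'] = 1
    return variables

print(shared_default() is shared_default())  # True  - one dict reused across calls
print(fresh_default() is fresh_default())    # False - a new dict per call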
|
0c266606ec10a8814ce749925a727ea57dd32aeb
|
test/343-winter-sports-resorts.py
|
test/343-winter-sports-resorts.py
|
assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 33 })
|
assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 36 })
|
Fix other failure - assuming that the change in sort key value for winter sports was intentional.
|
Fix other failure - assuming that the change in sort key value for winter sports was intentional.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 33 })
Fix other failure - assuming that the change in sort key value for winter sports was intentional.
|
assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 36 })
|
<commit_before>assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 33 })
<commit_msg>Fix other failure - assuming that the change in sort key value for winter sports was intentional.<commit_after>
|
assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 36 })
|
assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 33 })
Fix other failure - assuming that the change in sort key value for winter sports was intentional.assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 36 })
|
<commit_before>assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 33 })
<commit_msg>Fix other failure - assuming that the change in sort key value for winter sports was intentional.<commit_after>assert_has_feature(
15, 5467, 12531, 'landuse',
{ 'kind': 'winter_sports',
'sort_key': 36 })
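assert_has_feature is supplied by the project's test harness and is not part of this file; a hypothetical minimal version, taking pre-decoded tile data instead of fetching tiles, could look like this:

def assert_has_feature(z, x, y, layer, properties, tile_layers):
    # tile_layers: {layer_name: [{property: value, ...}, ...]}, decoded elsewhere
    for feature in tile_layers.get(layer, []):
        if all(feature.get(k) == v for k, v in properties.items()):
            return
    raise AssertionError('no %s feature in tile %d/%d/%d matching %r'
                         % (layer, z, x, y, properties))

decoded = {'landuse': [{'kind': 'winter_sports', 'sort_key': 36}]}
assert_has_feature(15, 5467, 12531, 'landuse',
                   {'kind': 'winter_sports', 'sort_key': 36}, decoded)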
|
40d204c996e41a030dac240c99c66a25f8f8586e
|
scripts/generate-bcrypt-hashed-password.py
|
scripts/generate-bcrypt-hashed-password.py
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
Fix "string argument without an encoding" python3 error in bcrypt script
|
Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
<commit_before>#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
<commit_msg>Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.<commit_after>
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
|
<commit_before>#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(bytes(password), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
<commit_msg>Fix "string argument without an encoding" python3 error in bcrypt script
generate-bcrypt-hashed-password raises an error on python3 since
`bytes` is called without an encoding argument. Replacing it with
`.encode` should fix the problem.<commit_after>#!/usr/bin/env python
"""
A script to return a bcrypt hash of a password.
Its intended use is for creating known passwords to replace user passwords in cleaned up databases.
Cost-factor is the log2 number of rounds of hashing to use for the salt. It's worth researching how many rounds you need
for your use context, but recent recommendations are 10-12 as a minimum.
Usage:
    scripts/generate-bcrypt-hashed-password.py <password> <cost-factor>
"""
import bcrypt
from docopt import docopt
def hash_password(password, cost_factor):
return bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt(cost_factor)).decode('utf-8')
if __name__ == "__main__":
arguments = docopt(__doc__)
password = arguments['<password>']
cost_factor = int(arguments['<cost-factor>'])
print(hash_password(password, cost_factor))
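The fix matters because Python 3 dropped implicit str-to-bytes coercion: bytes(password) raises the very error in the commit title, while encode makes the conversion explicit. A stdlib-only demonstration (no bcrypt needed to see the difference):

password = 'hunter2'
try:
    bytes(password)                   # Python 3: TypeError
except TypeError as exc:
    print('bytes(str) failed:', exc)  # string argument without an encoding
print(password.encode('utf-8'))       # b'hunter2', the form bcrypt.hashpw() accepts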
|
4037036de79f6503921bbd426bb5352f2f86f12b
|
plyer/platforms/android/camera.py
|
plyer/platforms/android/camera.py
|
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
activity.unbind(on_activity_result=self._on_activity_result)
activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
import android
import android.activity
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
android.activity.unbind(on_activity_result=self._on_activity_result)
android.activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
android.activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
Revert "Activity was imported twice"
|
Revert "Activity was imported twice"
This reverts commit a0600929774c1e90c7dc43043ff87b5ea84213b4.
|
Python
|
mit
|
johnbolia/plyer,johnbolia/plyer,kived/plyer,kivy/plyer,cleett/plyer,kived/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,cleett/plyer,kostyll/plyer,KeyWeeUsr/plyer,kostyll/plyer
|
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
activity.unbind(on_activity_result=self._on_activity_result)
activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
Revert "Activity was imported twice"
This reverts commit a0600929774c1e90c7dc43043ff87b5ea84213b4.
|
import android
import android.activity
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
android.activity.unbind(on_activity_result=self._on_activity_result)
android.activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
android.activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
<commit_before>from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
activity.unbind(on_activity_result=self._on_activity_result)
activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
<commit_msg>Revert "Activity was imported twice"
This reverts commit a0600929774c1e90c7dc43043ff87b5ea84213b4.<commit_after>
|
import android
import android.activity
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
android.activity.unbind(on_activity_result=self._on_activity_result)
android.activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
android.activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
activity.unbind(on_activity_result=self._on_activity_result)
activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
Revert "Activity was imported twice"
This reverts commit a0600929774c1e90c7dc43043ff87b5ea84213b4.
import android
import android.activity
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
android.activity.unbind(on_activity_result=self._on_activity_result)
android.activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
android.activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
|
<commit_before>from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
activity.unbind(on_activity_result=self._on_activity_result)
activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
<commit_msg>Revert "Activity was imported twice"
This reverts commit a0600929774c1e90c7dc43043ff87b5ea84213b4.<commit_after>
import android
import android.activity
from os import unlink
from jnius import autoclass, cast
from plyer.facades import Camera
from plyer.platforms.android import activity
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
class AndroidCamera(Camera):
def _take_picture(self, on_complete, filename=None):
assert(on_complete is not None)
self.on_complete = on_complete
self.filename = filename
android.activity.unbind(on_activity_result=self._on_activity_result)
android.activity.bind(on_activity_result=self._on_activity_result)
intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
uri = Uri.parse('file://' + filename)
parcelable = cast('android.os.Parcelable', uri)
intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable)
activity.startActivityForResult(intent, 0x123)
def _on_activity_result(self, requestCode, resultCode, intent):
if requestCode != 0x123:
return
android.activity.unbind(on_activity_result=self._on_activity_result)
if self.on_complete(self.filename):
self._unlink(self.filename)
def _unlink(self, fn):
try:
unlink(fn)
except:
pass
def instance():
return AndroidCamera()
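The unbind-before-bind pair protects against registering the same callback twice across repeated _take_picture calls; a toy dispatcher (a sketch, not the real android.activity module) makes the duplicate-handler problem it avoids visible:

class Dispatcher(object):
    def __init__(self):
        self.handlers = []

    def bind(self, on_activity_result):
        self.handlers.append(on_activity_result)

    def unbind(self, on_activity_result):
        if on_activity_result in self.handlers:
            self.handlers.remove(on_activity_result)

    def fire(self, *args):
        for handler in list(self.handlers):
            handler(*args)

toy_activity = Dispatcher()
def on_result(request_code, result_code, intent):
    print('activity result', hex(request_code), result_code)

for _ in range(2):                  # simulate two consecutive _take_picture() calls
    toy_activity.unbind(on_result)  # drop any stale registration first
    toy_activity.bind(on_result)
toy_activity.fire(0x123, -1, None)  # fires once; without unbind it would fire twice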
|
7a5b86bcb8c0a2e8c699c7602cef50bed2acef1b
|
src/keybar/tests/factories/user.py
|
src/keybar/tests/factories/user.py
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='md5')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='pbkdf2_sha256')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
Use pdbkdf_sha256 hasher for testing too.
|
Use pdbkdf_sha256 hasher for testing too.
|
Python
|
bsd-3-clause
|
keybar/keybar
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='md5')
return super(UserFactory, cls)._prepare(create, **kwargs)
Use pdbkdf_sha256 hasher for testing too.
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='pbkdf2_sha256')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
<commit_before>import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='md5')
return super(UserFactory, cls)._prepare(create, **kwargs)
<commit_msg>Use pdbkdf_sha256 hasher for testing too.<commit_after>
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='pbkdf2_sha256')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='md5')
return super(UserFactory, cls)._prepare(create, **kwargs)
Use pbkdf2_sha256 hasher for testing too.import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='pbkdf2_sha256')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
<commit_before>import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='md5')
return super(UserFactory, cls)._prepare(create, **kwargs)
<commit_msg>Use pbkdf2_sha256 hasher for testing too.<commit_after>import factory
from django.contrib.auth.hashers import make_password
from keybar.models.user import User
class UserFactory(factory.DjangoModelFactory):
email = factory.Sequence(lambda i: '{0}@none.none'.format(i))
is_active = True
class Meta:
model = User
@classmethod
def _prepare(cls, create, **kwargs):
raw_password = kwargs.pop('raw_password', 'secret')
if 'password' not in kwargs:
kwargs['password'] = make_password(raw_password, hasher='pbkdf2_sha256')
return super(UserFactory, cls)._prepare(create, **kwargs)
|
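A side note on the record above: hashing test passwords with md5 keeps a test suite fast but exercises a different code path than production, while pbkdf2_sha256 matches Django's default hasher. A minimal sketch of make_password with an explicit hasher, assuming a recent Django is installed (settings.configure() stands in for a real project):

from django.conf import settings

# Both hasher class paths below ship with Django itself.
settings.configure(PASSWORD_HASHERS=[
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.MD5PasswordHasher',
])

from django.contrib.auth.hashers import check_password, make_password

fast = make_password('secret', hasher='md5')
slow = make_password('secret', hasher='pbkdf2_sha256')

print(fast.split('$')[0])   # md5
print(slow.split('$')[0])   # pbkdf2_sha256
assert check_password('secret', slow)  # verification round-trips

A common compromise is to keep the md5 hasher only in test settings for speed; the commit above instead opts for parity with production.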
5c29b4322d1a24c4f389076f2a9b8acbeabd89e2
|
python/lumidatumclient/classes.py
|
python/lumidatumclient/classes.py
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = model_id if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
Python
|
mit
|
Lumidatum/lumidatumclients,Lumidatum/lumidatumclients,daws/lumidatumclients,Lumidatum/lumidatumclients
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = model_id if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
Fix for os.path.join with model_id, was breaking on non-string model_id values.
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
<commit_before>import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = model_id if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
<commit_msg>Fix for os.path.join with model_id, was breaking on non-string model_id values.<commit_after>
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = model_id if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
Fix for os.path.join with model_id, was breaking on non-string model_id values.import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
<commit_before>import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = model_id
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = model_id if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
<commit_msg>Fix for os.path.join with model_id, was breaking on non-string model_id values.<commit_after>import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
        selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
        return self.getRecommendations(parameters, model_id)
|
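For context on the fix in the record above: on Python 3, os.path.join raises TypeError the moment a component is not a string, which is exactly what an integer model_id triggered. A small stdlib-only illustration (the host is a placeholder):

import os

model_id = 42  # a non-string id, as a caller might pass
try:
    os.path.join('https://example.com', 'api/predict', model_id)
except TypeError as exc:
    print(exc)  # e.g. join() argument must be str, bytes, or os.PathLike object, not 'int'

# Coercing up front, as the commit does, avoids the crash:
print(os.path.join('https://example.com', 'api/predict', str(model_id)))

Note that os.path.join builds filesystem paths (it would use backslashes on Windows), so plain string formatting or urllib.parse.urljoin is usually a safer way to assemble URLs.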
7b2dac39cdcbc8d5f05d4979df06bf1ab1ae065f
|
goetia/pythonizors/pythonize_parsing.py
|
goetia/pythonizors/pythonize_parsing.py
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
pair = self.next()
left = pair.left if pair.has_left else None
right = pair.right if pair.has_right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
left, right = self.next()
left, right = left.value() if left else None, right.value() if right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
Fix std::optional access in SplitPairedReader pythonization
|
Fix std::optional access in SplitPairedReader pythonization
|
Python
|
mit
|
camillescott/boink,camillescott/boink,camillescott/boink,camillescott/boink
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
pair = self.next()
left = pair.left if pair.has_left else None
right = pair.right if pair.has_right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__Fix std::optional access in SplitPairedReader pythonization
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
left, right = self.next()
left, right = left.value() if left else None, right.value() if right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
<commit_before>from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
pair = self.next()
left = pair.left if pair.has_left else None
right = pair.right if pair.has_right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__<commit_msg>Fix std::optional access in SplitPairedReader pythonization<commit_after>
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
left, right = self.next()
left, right = left.value() if left else None, right.value() if right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
pair = self.next()
left = pair.left if pair.has_left else None
right = pair.right if pair.has_right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__Fix std::optional access in SplitPairedReader pythonizationfrom goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
left, right = self.next()
left, right = left.value() if left else None, right.value() if right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
<commit_before>from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
pair = self.next()
left = pair.left if pair.has_left else None
right = pair.right if pair.has_right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__<commit_msg>Fix std::optional access in SplitPairedReader pythonization<commit_after>from goetia.pythonizors.utils import is_template_inst
def pythonize_goetia_parsing(klass, name):
is_fastx, _ = is_template_inst(name, 'FastxParser')
if is_fastx:
def __iter__(self):
while not self.is_complete():
record = self.next()
if record:
yield record
klass.__iter__ = __iter__
is_split, _ = is_template_inst(name, 'SplitPairedReader')
if is_split:
def __iter__(self):
while not self.is_complete():
left, right = self.next()
left, right = left.value() if left else None, right.value() if right else None
if left is not None or right is not None:
yield left, right
klass.__iter__ = __iter__
|
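The pattern in the record above — attaching a generator-based __iter__ to a bound C++ class and unwrapping std::optional via truthiness plus .value() — can be sketched in pure Python with hypothetical stand-in classes (no cppyy required):

class FakeOptional:
    """Stands in for a bound std::optional: falsy when empty, .value() to unwrap."""
    def __init__(self, value=None):
        self._value = value
    def __bool__(self):
        return self._value is not None
    def value(self):
        return self._value

class FakeSplitPairedReader:
    """Stands in for the C++ reader: next() yields a pair of optionals."""
    def __init__(self, pairs):
        self._pairs = list(pairs)
    def is_complete(self):
        return not self._pairs
    def next(self):
        return self._pairs.pop(0)

def __iter__(self):
    while not self.is_complete():
        left, right = self.next()
        left, right = left.value() if left else None, right.value() if right else None
        if left is not None or right is not None:
            yield left, right

FakeSplitPairedReader.__iter__ = __iter__  # same monkey-patch as the pythonizer

reader = FakeSplitPairedReader([(FakeOptional('r1'), FakeOptional('r2')),
                                (FakeOptional('r3'), FakeOptional())])
print(list(reader))  # [('r1', 'r2'), ('r3', None)]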
ccb7446b02b394af308f4fba0500d402240f117e
|
home/migrations/0002_create_homepage.py
|
home/migrations/0002_create_homepage.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
Python
|
mit
|
OpenCanada/lindinitiative,OpenCanada/lindinitiative
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
<commit_msg>Remove any existing localhost sites and use the page id rather than the object to set the default homepage.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
Remove any existing localhost sites and use the page id rather than the object to set the default homepage.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page=homepage, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
<commit_msg>Remove any existing localhost sites and use the page id rather than the object to set the default homepage.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def create_homepage(apps, schema_editor):
# Get models
ContentType = apps.get_model('contenttypes.ContentType')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model('home.HomePage')
# Delete the default homepage
Page.objects.get(id=2).delete()
# Create content type for homepage model
homepage_content_type, created = ContentType.objects.get_or_create(
model='homepage', app_label='home')
# Create a new homepage
homepage = HomePage.objects.create(
title="Homepage",
slug='home',
content_type=homepage_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
Site = apps.get_model('wagtailcore.Site')
HomePage = apps.get_model("core", "HomePage")
homepage = HomePage.objects.get(slug="home")
Site.objects.filter(hostname='localhost').delete()
# Create a site with the new homepage set as the root
Site.objects.create(
hostname='localhost', root_page_id=homepage.id, is_default_site=True)
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.RunPython(create_homepage),
]
|
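Two details in the commit above generalize to other data migrations: deleting any pre-existing 'localhost' Site makes the RunPython step safe to re-run, and assigning root_page_id avoids handing a historical-model instance to the foreign key. A hedged sketch of the same pattern with a reversible no-op (the dependency name is illustrative):

from django.db import migrations

def forwards(apps, schema_editor):
    Site = apps.get_model('wagtailcore', 'Site')
    HomePage = apps.get_model('home', 'HomePage')
    homepage = HomePage.objects.get(slug='home')
    # Delete first so re-running cannot trip the unique hostname constraint.
    Site.objects.filter(hostname='localhost').delete()
    Site.objects.create(hostname='localhost',
                        root_page_id=homepage.id,  # the id, not the instance
                        is_default_site=True)

class Migration(migrations.Migration):
    dependencies = [('home', '0002_create_homepage')]  # illustrative
    operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]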
1179163881fe1dedab81a02a940c711479a334ab
|
Instanssi/admin_auth/forms.py
|
Instanssi/admin_auth/forms.py
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
Use passwordinput in password field.
|
admin_auth: Use passwordinput in password field.
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
admin_auth: Use passwordinput in password field.
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
<commit_before># -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
<commit_msg>admin_auth: Use passwordinput in password field.<commit_after>
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
admin_auth: Use passwordinput in password field.# -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
<commit_before># -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
<commit_msg>admin_auth: Use passwordinput in password field.<commit_after># -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
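The one-line widget change above is worth spelling out: a plain CharField renders as an input of type "text", echoing the password on screen, while forms.PasswordInput renders type "password" and by default discards the submitted value when the form is redisplayed. A minimal standalone check (settings.configure() is only there so the form machinery runs outside a project; the form name is hypothetical):

import django
from django.conf import settings

settings.configure()
django.setup()

from django import forms

class DemoLoginForm(forms.Form):  # hypothetical minimal form
    username = forms.CharField()
    password = forms.CharField(widget=forms.PasswordInput)

form = DemoLoginForm(data={'username': 'u', 'password': 's3cret'})
print(form.is_valid())          # True
print('s3cret' in form.as_p())  # False: the password is not echoed back

Passing render_value=True to PasswordInput would keep the value on re-render, which is rarely advisable.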
ab7856950c058d00aac99874669839e09bc116c6
|
models.py
|
models.py
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
|
Order feedback items by their timestamp.
|
Order feedback items by their timestamp.
|
Python
|
bsd-3-clause
|
littleweaver/django-talkback,littleweaver/django-talkback,littleweaver/django-talkback
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
Order feedback items by their timestamp.
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
|
<commit_before>from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
<commit_msg>Order feedback items by their timestamp.<commit_after>
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
|
from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
Order feedback items by their timestamp.from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
|
<commit_before>from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
<commit_msg>Order feedback items by their timestamp.<commit_after>from django.conf import settings
from django.db import models
class FeedbackItem(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
resolved = models.BooleanField(default=False)
content = models.TextField()
screenshot = models.FileField(blank=True, null=True, upload_to="feedback/screenshots")
# Request Data
view = models.CharField(max_length=255)
request_path = models.CharField(max_length=255)
# The longest methods should be 7 chars, but we'll allow custom methods up
# to 20 chars just in case.
request_method = models.CharField(max_length=20, blank=True, null=True)
# How long is the longest encoding name?
request_encoding = models.CharField(max_length=20, blank=True, null=True)
request_meta = models.TextField(blank=True, null=True)
request_get = models.TextField(blank=True, null=True)
request_post = models.TextField(blank=True, null=True)
request_files = models.TextField(blank=True, null=True)
def __unicode__(self):
return "{username} at {path}".format(
username=self.user.get_full_name(),
path = self.request_path
)
class Meta:
ordering = ["-timestamp"]
|
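Adding Meta.ordering as in the record above changes every unqualified queryset for the model, not just one view. A self-contained demonstration against an in-memory SQLite database, using a hypothetical stand-in model:

import datetime

import django
from django.conf import settings

settings.configure(
    INSTALLED_APPS=['django.contrib.contenttypes', 'django.contrib.auth'],
    DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3',
                           'NAME': ':memory:'}},
    USE_TZ=False,
)
django.setup()

from django.db import connection, models

class Note(models.Model):  # stand-in for FeedbackItem
    timestamp = models.DateTimeField()
    class Meta:
        app_label = 'auth'           # piggyback on an installed app for the demo
        ordering = ['-timestamp']    # same ordering as the commit above

with connection.schema_editor() as editor:
    editor.create_model(Note)

Note.objects.create(timestamp=datetime.datetime(2024, 1, 1))
Note.objects.create(timestamp=datetime.datetime(2024, 1, 2))

print([n.timestamp.day for n in Note.objects.all()])                  # [2, 1]
print([n.timestamp.day for n in Note.objects.order_by('timestamp')])  # [1, 2]

A bare order_by() with no arguments clears the default ordering for a single query when the implicit sort gets in the way.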
cdaffa187b41f3a84cb5a6b44f2e781a9b249f2b
|
tests/test_users.py
|
tests/test_users.py
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.convert_dict_to_user_instance({})
assert result
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.return_user_instance_or_anonymous({})
assert result
|
Update test to reflect new method name.
|
Update test to reflect new method name.
|
Python
|
mit
|
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.convert_dict_to_user_instance({})
assert resultUpdate test to reflect new method name.
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.return_user_instance_or_anonymous({})
assert result
|
<commit_before>from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.convert_dict_to_user_instance({})
assert result<commit_msg>Update test to reflect new method name.<commit_after>
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.return_user_instance_or_anonymous({})
assert result
|
from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.convert_dict_to_user_instance({})
assert resultUpdate test to reflect new method name.from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.return_user_instance_or_anonymous({})
assert result
|
<commit_before>from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.convert_dict_to_user_instance({})
assert result<commit_msg>Update test to reflect new method name.<commit_after>from context import slot_users_controller as uc
class TestUsers:
def test_validate_credentials_returns_true_for_valid_credentials(self):
result = uc.return_user_if_valid_credentials('slot', 'test')
assert result is True
def test_validate_credentials_returns_false_for_invalid_credentials(self):
result = uc.return_user_if_valid_credentials('bad_username', 'bad_password')
assert result is False
def test_convert_dict_to_user_instance_returns_valid_user_instance(self):
result = uc.return_user_instance_or_anonymous({})
assert result
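
A minimal sketch of what the renamed helper might look like (hypothetical; the controller body is not part of this record). The name suggests an empty or invalid dict falls back to an anonymous placeholder, which is still a truthy object, so `assert result` passes:

class AnonymousUser(object):
    is_authenticated = False

def return_user_instance_or_anonymous(user_dict):
    # Assumption: a truthy dict becomes an authenticated user object,
    # anything else degrades to the anonymous placeholder.
    if not user_dict:
        return AnonymousUser()
    user = AnonymousUser()
    user.__dict__.update(user_dict)
    user.is_authenticated = True
    return user

assert return_user_instance_or_anonymous({})  # anonymous, but truthy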
|
63276c6de1b61fb9e9a5b7b4a4eac5813e433f80
|
tests/test_utils.py
|
tests/test_utils.py
|
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)
|
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.base.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)
|
Fix mock path to fix broken test.
|
Fix mock path to fix broken test.
|
Python
|
mit
|
evetrivia/thanatos
|
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)Fix mock path to fix broken test.
|
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.base.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)
|
<commit_before>
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)<commit_msg>Fix mock path to fix broken test.<commit_after>
|
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.base.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)
|
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)Fix mock path to fix broken test.
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.base.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)
|
<commit_before>
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)<commit_msg>Fix mock path to fix broken test.<commit_after>
import mock
import unittest2
from thanatos import utils
class UtilsTestCase(unittest2.TestCase):
def setUp(self):
pass
@mock.patch('thanatos.questions.base.Question.__subclasses__')
def test_required_tables_returns_unique_set(self, mock_subclasses):
""" """
mock_subclass_1 = mock.Mock()
mock_subclass_1.required_tables = ['test']
mock_subclass_2 = mock.Mock()
mock_subclass_2.required_tables = ['test', 'other_test']
mock_subclasses.return_value = [mock_subclass_1, mock_subclass_2]
required_tables = utils.get_list_of_required_tables()
self.assertEqual(
required_tables,
{'test', 'other_test'}
)
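
The fix illustrates the standard mock.patch rule: patch the name where it is looked up, not where it was originally defined. Once Question moved into thanatos.questions.base, the old target patched a stale path and the test broke. A self-contained sketch of the same pattern (class names here are illustrative, not from the repo):

import mock  # the standalone backport; on Python 3, unittest.mock behaves the same

class Question(object):
    pass

def get_required_tables():
    tables = set()
    for subclass in Question.__subclasses__():
        tables.update(subclass.required_tables)
    return tables

with mock.patch.object(Question, '__subclasses__') as mock_subs:
    fake = mock.Mock()
    fake.required_tables = ['test', 'other_test']
    mock_subs.return_value = [fake]
    assert get_required_tables() == {'test', 'other_test'}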
|
a3cce9e4840cc687f6dcdd0b88577d2f13f3258e
|
onlineweb4/settings/raven.py
|
onlineweb4/settings/raven.py
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
}
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
'tags': { 'app': config('OW4_RAVEN_APP_NAME', default='') },
}
|
Make it possible to specify which app to represent in sentry
|
Make it possible to specify which app to represent in sentry
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
}
Make it possible to specify which app to represent in sentry
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
'tags': { 'app': config('OW4_RAVEN_APP_NAME', default='') },
}
|
<commit_before>import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
}
<commit_msg>Make it possible to specify which app to represent in sentry<commit_after>
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
'tags': { 'app': config('OW4_RAVEN_APP_NAME', default='') },
}
|
import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
}
Make it possible to specify which app to represent in sentryimport os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
'tags': { 'app': config('OW4_RAVEN_APP_NAME', default='') },
}
|
<commit_before>import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
}
<commit_msg>Make it possible to specify which app to represent in sentry<commit_after>import os
import raven
from decouple import config
RAVEN_CONFIG = {
'dsn': config('OW4_RAVEN_DSN', default='https://user:pass@sentry.io/project'),
'environment': config('OW4_ENVIRONMENT', default='DEVELOP'),
# Use git to determine release
'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
'tags': { 'app': config('OW4_RAVEN_APP_NAME', default='') },
}
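
With python-decouple, the tag value comes from the environment (or a .env file), so each deployment can label its own Sentry events without a code change. A small usage sketch, assuming the variable is exported before Django starts:

# .env or shell environment for one deployment:
#   OW4_RAVEN_APP_NAME=onlineweb4-web
from decouple import config

app_name = config('OW4_RAVEN_APP_NAME', default='')
tags = {'app': app_name}  # attached to every event via RAVEN_CONFIG['tags']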
|
e2cc9c822abb675a196468ee89b063e0162c16d5
|
changes/api/author_build_index.py
|
changes/api/author_build_index.py
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
if author_id == 'me':
return '', 401
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
Move 'me' check outside of author lookup
|
Move 'me' check outside of author lookup
|
Python
|
apache-2.0
|
wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,dropbox/changes
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
if author_id == 'me':
return '', 401
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
Move 'me' check outside of author lookup
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
if author_id == 'me':
return '', 401
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
<commit_msg>Move 'me' check outside of author lookup<commit_after>
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
if author_id == 'me':
return '', 401
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
Move 'me' check outside of author lookupfrom __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
|
<commit_before>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
author = self._get_author(author_id)
if not author:
if author_id == 'me':
return '', 401
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
<commit_msg>Move 'me' check outside of author lookup<commit_after>from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.api.auth import get_current_user
from changes.models import Author, Build
class AuthorBuildIndexAPIView(APIView):
def _get_author(self, author_id):
if author_id == 'me':
user = get_current_user()
if user is None:
return
return Author.query.filter_by(email=user.email).first()
return Author.query.get(author_id)
def get(self, author_id):
if author_id == 'me' and not get_current_user():
return '', 401
author = self._get_author(author_id)
if not author:
return self.respond([])
queryset = Build.query.options(
joinedload('project', innerjoin=True),
joinedload('author'),
joinedload('source'),
).filter(
Build.author_id == author.id,
).order_by(Build.date_created.desc(), Build.date_started.desc())
return self.paginate(queryset)
def get_stream_channels(self, author_id):
author = self._get_author(author_id)
if not author:
return []
return ['authors:{0}:builds'.format(author.id.hex)]
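
The reordering changes the failure semantics: an unauthenticated request for 'me' is now always a 401, while a missing author (including a logged-in user with no Author row) is a successful empty response. The control flow in isolation, as a framework-free sketch with stand-in types:

def get_builds(author_id, current_user, find_author):
    # Auth problem first: 'me' only makes sense with a session.
    if author_id == 'me' and current_user is None:
        return ('', 401)
    author = find_author(author_id)
    if author is None:
        return ([], 200)  # valid request, just no data
    return (author['builds'], 200)

assert get_builds('me', None, lambda _id: None) == ('', 401)
assert get_builds('abc', None, lambda _id: None) == ([], 200)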
|
5d9ec3c24972772814921fb8500845ca3d8fa8b3
|
chempy/electrochemistry/nernst.py
|
chempy/electrochemistry/nernst.py
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.molar_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
|
Use actual constant name (molar_gas_constant)
|
Use actual constant name (molar_gas_constant)
|
Python
|
bsd-2-clause
|
bjodah/aqchem,bjodah/aqchem,bjodah/chempy,bjodah/aqchem,bjodah/chempy
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
Use actual constant name (molar_gas_constant)
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.molar_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
<commit_msg>Use actual constant name (molar_gas_constant)<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.molar_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
Use actual constant name (molar_gas_constant)# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.molar_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.ideal_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
<commit_msg>Use actual constant name (molar_gas_constant)<commit_after># -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
import math
def nernst_potential(ion_conc_out, ion_conc_in, charge, T,
constants=None, units=None, backend=math):
"""
Calculates the Nernst potential using the Nernst equation for a particular
ion.
Parameters
----------
ion_conc_out: float with unit
Extracellular concentration of ion
ion_conc_in: float with unit
Intracellular concentration of ion
charge: integer
Charge of the ion
T: float with unit
Absolute temperature
constants: object (optional, default: None)
constant attributes accessed:
F - Faraday constant
R - Ideal Gas constant
units: object (optional, default: None)
unit attributes: coulomb, joule, kelvin, mol
backend: module (optional, default: math)
module used to calculate log using `log` method, can be substituted
with sympy to get symbolic answers
Returns
-------
Membrane potential
"""
if constants is None:
F = 96485.33289
R = 8.3144598
if units is not None:
F *= units.coulomb / units.mol
R *= units.joule / units.kelvin / units.mol
else:
F = constants.Faraday_constant
R = constants.molar_gas_constant
return (R * T) / (charge * F) * backend.log(ion_conc_out / ion_conc_in)
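
A worked example of E = (R*T)/(z*F) * ln(c_out/c_in) with the default constants (no units object): potassium at body temperature, roughly 5 mM outside and 140 mM inside, charge +1. Assuming the package layout shown by this file's path:

from chempy.electrochemistry.nernst import nernst_potential

E = nernst_potential(5e-3, 140e-3, 1, 310.0)
print(E)  # about -0.089 V, i.e. the familiar ~ -89 mV potassium equilibrium potential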
|
b8a7dd2dfc9322498dc7500f840bedd20d807ae1
|
samples/numpy_blir.py
|
samples/numpy_blir.py
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
x[i-1,j-1] = 10;
}
}
"""
N = 14
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
}
}
"""
N = 15
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
Fix dumb out of bounds error.
|
Fix dumb out of bounds error.
|
Python
|
bsd-2-clause
|
seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
x[i-1,j-1] = 10;
}
}
"""
N = 14
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
Fix dumb out of bounds error.
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
}
}
"""
N = 15
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
<commit_before>import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
x[i-1,j-1] = 10;
}
}
"""
N = 14
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
<commit_msg>Fix dumb out of bounds error.<commit_after>
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
}
}
"""
N = 15
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
x[i-1,j-1] = 10;
}
}
"""
N = 14
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
Fix dumb out of bounds error.import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
}
}
"""
N = 15
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
|
<commit_before>import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
x[i-1,j-1] = 10;
}
}
"""
N = 14
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
<commit_msg>Fix dumb out of bounds error.<commit_after>import numpy as np
from blaze.blir import compile, execute
source = """
def main(x: array[int], n : int) -> void {
var int i;
var int j;
for i in range(n) {
for j in range(n) {
x[i,j] = i+j;
}
}
}
"""
N = 15
ast, env = compile(source)
arr = np.eye(N, dtype='int32')
args = (arr, N)
execute(env, args, timing=True)
print arr
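
The removed write is a classic off-by-one-into-negative pattern: on the first outer iteration i is 0, so x[i-1, j-1] indexes row -1. NumPy silently wraps negative indices to the end of the axis, but a compiled kernel such as BLIR indexes raw memory, so the same expression reads or writes out of bounds. A pure-NumPy illustration of why the bug can go unnoticed in Python (illustrative, not BLIR):

import numpy as np

x = np.zeros((3, 3), dtype='int32')
x[-1, -1] = 10   # legal in NumPy: wraps around to x[2, 2]
print(x[2, 2])   # 10 -- the wraparound that masks the error outside a C-like kernel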
|
3335c3fed0718bac401e0dc305edc73830ea8c6b
|
EmeraldAI/Entities/PipelineArgs.py
|
EmeraldAI/Entities/PipelineArgs.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and len(self.SentenceList) > 0:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
return random.choice(self.GetSentencesWithHighestValue(margin))
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and self.SentenceList:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
result = self.GetSentencesWithHighestValue(margin)
if(result != None):
return random.choice(result)
return None
|
Fix Pipeline Args Bug on dict length
|
Fix Pipeline Args Bug on dict length
|
Python
|
apache-2.0
|
MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and len(self.SentenceList) > 0:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
return random.choice(self.GetSentencesWithHighestValue(margin))
Fix Pipeline Args Bug on dict length
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and self.SentenceList:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
result = self.GetSentencesWithHighestValue(margin)
if(result != None):
return random.choice(result)
return None
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and len(self.SentenceList) > 0:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
return random.choice(self.GetSentencesWithHighestValue(margin))
<commit_msg>Fix Pipeline Args Bug on dict length<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and self.SentenceList:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
result = self.GetSentencesWithHighestValue(margin)
if(result != None):
return random.choice(result)
return None
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and len(self.SentenceList) > 0:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
return random.choice(self.GetSentencesWithHighestValue(margin))
Fix Pipeline Args Bug on dict length#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and self.SentenceList:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
result = self.GetSentencesWithHighestValue(margin)
if(result != None):
return random.choice(result)
return None
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and len(self.SentenceList) > 0:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
return random.choice(self.GetSentencesWithHighestValue(margin))
<commit_msg>Fix Pipeline Args Bug on dict length<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import random
from EmeraldAI.Entities.BaseObject import BaseObject
class PipelineArgs(BaseObject):
def __init__(self, input):
# Original Input
self.Input = input
self.Normalized = input
# Input Language | List of EmeraldAI.Entities.BaseObject.Word objects | List of parameters
self.Language = None
self.WordList = None
self.SentenceList = None
self.ParameterList = None
# Input with parameterized data
self.ParameterizedInput = None
# TODO Conversation History
self.History = None
        # Response with parameters | Raw response string | Response ID | Response found
self.Response = None
self.ResponseRaw = None
self.ResponseID = None
self.ResponseFound = False
self.ResponseAudio = None
# TODO
self.SessionID = 0
# TODO - List of Errors
self.Error = None
def GetSentencesWithHighestValue(self, margin=0):
if self.SentenceList != None and self.SentenceList:
highestRanking = max(node.Rating for node in self.SentenceList.values())
if margin > 0:
result = [node for node in self.SentenceList.values() if node.Rating>=(highestRanking-margin)]
else:
result = [node for node in self.SentenceList.values() if node.Rating==highestRanking]
return result
return None
def GetRandomSentenceWithHighestValue(self, margin=0):
result = self.GetSentencesWithHighestValue(margin)
if(result != None):
return random.choice(result)
return None
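
Both hunks guard empty inputs: an empty dict is falsy, so `self.SentenceList` alone covers the `len(...) > 0` case, and the caller no longer feeds None into random.choice (which raises TypeError on None and IndexError on an empty sequence). The essential pattern, reduced to a standalone sketch:

import random

def pick_highest(nodes, margin=0):
    if not nodes:  # handles both None and an empty dict
        return None
    top = max(n['rating'] for n in nodes.values())
    best = [n for n in nodes.values() if n['rating'] >= top - margin]
    return random.choice(best)

print(pick_highest(None))                                      # None, no TypeError
print(pick_highest({'a': {'rating': 1}, 'b': {'rating': 5}}))  # the 'b' node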
|
379c6254da0d6a06f8c01cd7cd2632a1d59624ac
|
comics/sets/context_processors.py
|
comics/sets/context_processors.py
|
from comics.sets.models import UserSet
def user_set(request):
try:
user_set = UserSet.objects.get(user=request.user)
return {
'user_set': user_set,
'user_set_comics': user_set.comics.all(),
}
except UserSet.DoesNotExist:
return {}
|
def user_set(request):
if hasattr(request, 'user_set'):
return {
'user_set': request.user_set,
'user_set_comics': request.user_set.comics.all(),
}
else:
return {}
|
Use request.user_set in context preprocessor
|
Use request.user_set in context preprocessor
|
Python
|
agpl-3.0
|
datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics
|
from comics.sets.models import UserSet
def user_set(request):
try:
user_set = UserSet.objects.get(user=request.user)
return {
'user_set': user_set,
'user_set_comics': user_set.comics.all(),
}
except UserSet.DoesNotExist:
return {}
Use request.user_set in context preprocessor
|
def user_set(request):
if hasattr(request, 'user_set'):
return {
'user_set': request.user_set,
'user_set_comics': request.user_set.comics.all(),
}
else:
return {}
|
<commit_before>from comics.sets.models import UserSet
def user_set(request):
try:
user_set = UserSet.objects.get(user=request.user)
return {
'user_set': user_set,
'user_set_comics': user_set.comics.all(),
}
except UserSet.DoesNotExist:
return {}
<commit_msg>Use request.user_set in context preprocessor<commit_after>
|
def user_set(request):
if hasattr(request, 'user_set'):
return {
'user_set': request.user_set,
'user_set_comics': request.user_set.comics.all(),
}
else:
return {}
|
from comics.sets.models import UserSet
def user_set(request):
try:
user_set = UserSet.objects.get(user=request.user)
return {
'user_set': user_set,
'user_set_comics': user_set.comics.all(),
}
except UserSet.DoesNotExist:
return {}
Use request.user_set in context preprocessordef user_set(request):
if hasattr(request, 'user_set'):
return {
'user_set': request.user_set,
'user_set_comics': request.user_set.comics.all(),
}
else:
return {}
|
<commit_before>from comics.sets.models import UserSet
def user_set(request):
try:
user_set = UserSet.objects.get(user=request.user)
return {
'user_set': user_set,
'user_set_comics': user_set.comics.all(),
}
except UserSet.DoesNotExist:
return {}
<commit_msg>Use request.user_set in context preprocessor<commit_after>def user_set(request):
if hasattr(request, 'user_set'):
return {
'user_set': request.user_set,
'user_set_comics': request.user_set.comics.all(),
}
else:
return {}
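
The processor now assumes something earlier in the request cycle populated request.user_set, trading a query per template render for one per request. A hypothetical middleware counterpart (old-style Django middleware of the era; not part of this record):

from comics.sets.models import UserSet

class UserSetMiddleware(object):
    def process_request(self, request):
        # Attach once per request; the context processor and views reuse it.
        if request.user.is_authenticated():
            try:
                request.user_set = UserSet.objects.get(user=request.user)
            except UserSet.DoesNotExist:
                pass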
|
3aa2f858f93ed3945bf1960d5c5d1d90df34422c
|
MoodJournal/entries/serializers.py
|
MoodJournal/entries/serializers.py
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
Revert "unique for date validator"
|
Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.
|
Python
|
mit
|
swpease/MoodJournal,swpease/MoodJournal,swpease/MoodJournal
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
<commit_before>from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
<commit_msg>Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.<commit_after>
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
<commit_before>from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
<commit_msg>Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.<commit_after>from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
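For reference, UniqueForDateValidator is a standard DRF validator; the revert removes its use here, not the facility itself. A minimal sketch of the wiring, with the caveat that DRF needs the date field present in the incoming data (or given a default) for the check to run at all, which is a common reason such validators get backed out:

from rest_framework import serializers
from rest_framework.validators import UniqueForDateValidator

class EntrySketchSerializer(serializers.ModelSerializer):
    # Illustrative only; not the serializer shipped in MoodJournal.
    class Meta:
        model = EntryInstance
        fields = ('category', 'date', 'entry')
        validators = [
            UniqueForDateValidator(
                queryset=EntryInstance.objects.all(),
                field='category',     # must be unique...
                date_field='date',    # ...per value of this date field
            )
        ]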
e394c1889eccb5806a480033dca467da51d515e5
|
scripts/test_setup.py
|
scripts/test_setup.py
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"Jinja2",
"nose",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"sqlalchemy",
"simplejson",
])
else:
deps.append("sqlalchemy")
deps.append("pyzmq")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"nose",
"sqlalchemy",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"simplejson",
])
else:
deps.extend([
"pyzmq",
])
# Jinja2 is a bit fragmented...
if python_version < "3.3":
deps.append("Jinja2==2.6")
else:
deps.append("Jinja2")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
Test dependencies: On Python 2.5, require Jinja 2.6
|
Test dependencies: On Python 2.5, require Jinja 2.6
|
Python
|
bsd-3-clause
|
omergertel/logbook,pombredanne/logbook,Rafiot/logbook,alonho/logbook,Rafiot/logbook,alonho/logbook,omergertel/logbook,Rafiot/logbook,FintanH/logbook,DasIch/logbook,DasIch/logbook,RazerM/logbook,mitsuhiko/logbook,omergertel/logbook,DasIch/logbook,dommert/logbook,alonho/logbook
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"Jinja2",
"nose",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"sqlalchemy",
"simplejson",
])
else:
deps.append("sqlalchemy")
deps.append("pyzmq")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
Test dependencies: On Python 2.5, require Jinja 2.6
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"nose",
"sqlalchemy",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"simplejson",
])
else:
deps.extend([
"pyzmq",
])
# Jinja2 is a bit fragmented...
if python_version < "3.3":
deps.append("Jinja2==2.6")
else:
deps.append("Jinja2")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
<commit_before>#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"Jinja2",
"nose",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"sqlalchemy",
"simplejson",
])
else:
deps.append("sqlalchemy")
deps.append("pyzmq")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
<commit_msg>Test dependencies: On Python 2.5, require Jinja 2.6<commit_after>
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"nose",
"sqlalchemy",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"simplejson",
])
else:
deps.extend([
"pyzmq",
])
# Jinja2 is a bit fragmented...
if python_version < "3.3":
deps.append("Jinja2==2.6")
else:
deps.append("Jinja2")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"Jinja2",
"nose",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"sqlalchemy",
"simplejson",
])
else:
deps.append("sqlalchemy")
deps.append("pyzmq")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
Test dependencies: On Python 2.5, require Jinja 2.6
#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"nose",
"sqlalchemy",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"simplejson",
])
else:
deps.extend([
"pyzmq",
])
# Jinja2 is a bit fragmented...
if python_version < "3.3":
deps.append("Jinja2==2.6")
else:
deps.append("Jinja2")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
<commit_before>#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"Jinja2",
"nose",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"sqlalchemy",
"simplejson",
])
else:
deps.append("sqlalchemy")
deps.append("pyzmq")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
<commit_msg>Test dependencies: On Python 2.5, require Jinja 2.6<commit_after>#! /usr/bin/python
import platform
import subprocess
import sys
def _execute(*args, **kwargs):
result = subprocess.call(*args, **kwargs)
if result != 0:
sys.exit(result)
if __name__ == '__main__':
python_version = platform.python_version()
deps = [
"execnet",
"nose",
"sqlalchemy",
]
if python_version < "2.6":
deps.extend([
"ssl",
"multiprocessing",
"pyzmq==2.1.11",
"simplejson",
])
else:
deps.extend([
"pyzmq",
])
# Jinja2 is a bit fragmented...
if python_version < "3.3":
deps.append("Jinja2==2.6")
else:
deps.append("Jinja2")
if python_version < "2.7":
deps.append("unittest2")
print("Setting up dependencies...")
_execute("pip install %s" % " ".join(deps), shell=True)
|
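One caveat about this script: platform.python_version() returns a string, and lexicographic comparison only happens to work for the versions targeted here (for example "2.10" < "2.6" is True). A safer sketch using sys.version_info, which compares numerically:

import sys

# Tuples compare element-wise as integers, so (2, 10) > (2, 6).
if sys.version_info < (2, 6):
    deps = ["ssl", "multiprocessing"]
elif sys.version_info < (2, 7):
    deps = ["unittest2"]
else:
    deps = []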
52f0b4569429da6b331e150496fe2b7ebef17597
|
gssapi/__about__.py
|
gssapi/__about__.py
|
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.2'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
|
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.3pre'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
|
Prepare for next development version
|
Prepare for next development version
|
Python
|
mit
|
sigmaris/python-gssapi,sigmaris/python-gssapi,sigmaris/python-gssapi,sigmaris/python-gssapi
|
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.2'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
Prepare for next development version
|
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.3pre'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
|
<commit_before>from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.2'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
<commit_msg>Prepare for next development version<commit_after>
|
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.3pre'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
|
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.2'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
Prepare for next development version
from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.3pre'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
|
<commit_before>from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.2'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
<commit_msg>Prepare for next development version<commit_after>from __future__ import unicode_literals
__title__ = 'python-gssapi'
__author__ = 'Hugh Cole-Baker and contributors'
__version__ = '0.6.3pre'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Copyright 2014 {0}'.format(__author__)
|
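Keeping metadata in __about__.py lets packaging code read the version without importing the package. A common pattern for consuming it from setup.py (this exact wiring is an assumption, not taken from python-gssapi):

about = {}
with open('gssapi/__about__.py') as f:
    exec(f.read(), about)   # populates __version__, __title__, etc.

version = about['__version__']   # '0.6.3pre'

Note that under PEP 440 normalization, modern tools treat the 'pre' suffix as an rc pre-release, so '0.6.3pre' is read as '0.6.3rc0'.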
da54fa6d681ab7f2e3146b55d562e5a4d68623cc
|
luigi/tasks/export/ftp/__init__.py
|
luigi/tasks/export/ftp/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
Make GO term export part of FTP export
|
Make GO term export part of FTP export
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
Make GO term export part of FTP export
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
<commit_msg>Make GO term export part of FTP export<commit_after>
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
Make GO term export part of FTP export
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
<commit_msg>Make GO term export part of FTP export<commit_after># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from .md5 import Md5Export
from .id_mapping import IdExport
from .rfam import RfamAnnotationExport
from .fasta import FastaExport
from .ensembl import EnsemblExport
from .go_annotations import GoAnnotationExport
class FtpExport(luigi.WrapperTask):
def requires(self):
yield Md5Export
yield IdExport
yield RfamAnnotationExport
yield FastaExport
yield EnsemblExport
yield GoAnnotationExport
|
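luigi.WrapperTask has no output of its own; it is considered complete once every task yielded from requires() is complete, which is why adding GoAnnotationExport to the generator is the whole change. A self-contained sketch of the mechanism:

import luigi

class Part(luigi.Task):
    def output(self):
        return luigi.LocalTarget('part.txt')

    def run(self):
        with self.output().open('w') as handle:
            handle.write('done')

class Everything(luigi.WrapperTask):
    def requires(self):
        yield Part()   # add more yields to extend the set

# luigi.build([Everything()], local_scheduler=True)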
eb496468d61ff3245adbdec4108a04bc40a357fc
|
Grid.py
|
Grid.py
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
Allow gridlines on both major and minor axes.
|
Allow gridlines on both major and minor axes.
|
Python
|
bsd-3-clause
|
alexras/boomslang
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
Allow gridlines on both major and minor axes.
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
<commit_before>from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
<commit_msg>Allow gridlines on both major and minor axes.<commit_after>
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
Allow gridlines on both major and minor axes.
from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
<commit_before>from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style)
# Gridlines should be below plots
axes.set_axisbelow(True)
<commit_msg>Allow gridlines on both major and minor axes.<commit_after>from boomslang.LineStyle import LineStyle
class Grid(object):
def __init__(self, color="#dddddd", style="-", visible=True):
self.color = color
self._lineStyle = LineStyle()
self._lineStyle.style = style
self.visible = visible
self.which = 'major'
@property
def style(self):
return self._lineStyle.style
@style.setter
def style(self, value):
self._lineStyle.style = value
@style.getter
def style(self):
return self._lineStyle.style
def draw(self, fig, axes):
if self.visible:
axes.grid(color=self.color, linestyle=self.style,
which=self.which)
# Gridlines should be below plots
axes.set_axisbelow(True)
|
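For which='minor' (or 'both') to draw anything, the axes must actually have minor ticks. A short matplotlib-only sketch, independent of boomslang:

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
ax.minorticks_on()                 # minor gridlines need minor ticks
ax.grid(color='#dddddd', linestyle='-', which='both')
ax.set_axisbelow(True)             # keep gridlines under the data
plt.show()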
8ae82e08fc42d89402550f5f545dbaa258196c8c
|
ibmcnx/test/test.py
|
ibmcnx/test/test.py
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
test = FilesPolicyService.browse( "title", "true", 1, 25 )
print test
|
Customize scripts to work with menu
|
Customize scripts to work with menu
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
Customize scripts to work with menu
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
test = FilesPolicyService.browse( "title", "true", 1, 25 )
print test
|
<commit_before>#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
<commit_msg>Customize scripts to work with menu<commit_after>
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
test = FilesPolicyService.browse( "title", "true", 1, 25 )
print test
|
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
Customize scripts to work with menu
#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
test = FilesPolicyService.browse( "title", "true", 1, 25 )
print test
|
<commit_before>#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
<commit_msg>Customize scripts to work with menu<commit_after>#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
test = FilesPolicyService.browse( "title", "true", 1, 25 )
print test
|
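The execfile(..., globdict) idiom is how these wsadmin/Jython 2.1 scripts pull service objects such as FilesPolicyService into the caller's namespace: the admin scripts define names rather than return values. A trimmed sketch of the pattern (the script name is a placeholder, not a real Connections script):

globdict = globals()

def loadService(script):
    # Runs the admin script in our own globals, so everything it
    # defines (e.g. FilesPolicyService) becomes visible here.
    execfile(script, globdict)

loadService("someAdmin.py")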
fc42c3cf72abeb053560c21e1870e8507aa2d666
|
examples/framework/faren/faren.py
|
examples/framework/faren/faren.py
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
Use insert_text instead of changed
|
Use insert_text instead of changed
|
Python
|
lgpl-2.1
|
stoq/kiwi
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
Use insert_text instead of changed
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
<commit_before>#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
<commit_msg>Use insert_text instead of changed<commit_after>
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
Use insert_text instead of changed
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
<commit_before>#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__changed(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
<commit_msg>Use insert_text instead of changed<commit_after>#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
from kiwi.ui.gadgets import quit_if_last
class FarenControl(BaseController):
def on_quitbutton__clicked(self, *args):
self.view.hide_and_quit()
def after_temperature__insert_text(self, entry, *args):
try:
temp = float(entry.get_text())
except ValueError:
temp = 0
celsius = (temp - 32) * 5/9.0
farenheit = (temp * 9/5.0) + 32
self.view.celsius.set_text("%.2f" % celsius)
self.view.farenheit.set_text("%.2f" % farenheit)
widgets = ["quitbutton", "temperature", "celsius", "farenheit"]
view = BaseView(gladefile="faren", delete_handler=quit_if_last,
widgets=widgets)
ctl = FarenControl(view)
view.show()
gtk.main()
|
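The rename works because kiwi autoconnects handlers by naming convention: after_<widget>__<signal> attaches after the default handler for that GTK signal. A rough plain-PyGTK equivalent of the before/after handlers, with no-op callbacks as placeholders (the 'insert-text' callback signature below is GTK's editable signature, not anything kiwi-specific):

import gtk

entry = gtk.Entry()

# roughly what after_temperature__changed hooked up:
entry.connect_after('changed', lambda editable: None)

# roughly what after_temperature__insert_text hooks up:
entry.connect_after('insert-text',
                    lambda editable, text, length, pos: None)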
a859890c9f17b2303061b2d68e5c58ad27e07b35
|
grizli/pipeline/__init__.py
|
grizli/pipeline/__init__.py
|
"""
Automated processing of associated exposures
"""
|
"""
Automated processing of associated exposures
"""
def fetch_from_AWS_bucket(root='j022644-044142', id=1161, product='.beams.fits', bucket_name='aws-grivam', verbose=True, dryrun=False, output_path='./', get_fit_args=False, skip_existing=True):
"""
Fetch products from the Grizli AWS bucket.
Boto3 will require that you have set up your AWS credentials in, e.g.,
~/.aws/credentials
"""
import os
import boto3
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}_{1:05d}{2}'.format(root, id, product))]
if get_fit_args:
files += ['Pipeline/{0}/Extractions/fit_args.npy'.format(root)]
for file in files:
local = os.path.join(output_path, os.path.basename(file))
if verbose:
print('{0} -> {1}'.format(file, output_path))
if not dryrun:
if os.path.exists(local) & skip_existing:
continue
bkt.download_file(file, local,
ExtraArgs={"RequestPayer": "requester"})
|
Add script to fetch data from AWS
|
Add script to fetch data from AWS
|
Python
|
mit
|
gbrammer/grizli
|
"""
Automated processing of associated exposures
"""
Add script to fetch data from AWS
|
"""
Automated processing of associated exposures
"""
def fetch_from_AWS_bucket(root='j022644-044142', id=1161, product='.beams.fits', bucket_name='aws-grivam', verbose=True, dryrun=False, output_path='./', get_fit_args=False, skip_existing=True):
"""
Fetch products from the Grizli AWS bucket.
Boto3 will require that you have set up your AWS credentials in, e.g.,
~/.aws/credentials
"""
import os
import boto3
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}_{1:05d}{2}'.format(root, id, product))]
if get_fit_args:
files += ['Pipeline/{0}/Extractions/fit_args.npy'.format(root)]
for file in files:
local = os.path.join(output_path, os.path.basename(file))
if verbose:
print('{0} -> {1}'.format(file, output_path))
if not dryrun:
if os.path.exists(local) & skip_existing:
continue
bkt.download_file(file, local,
ExtraArgs={"RequestPayer": "requester"})
|
<commit_before>"""
Automated processing of associated exposures
"""
<commit_msg>Add script to fetch data from AWS<commit_after>
|
"""
Automated processing of associated exposures
"""
def fetch_from_AWS_bucket(root='j022644-044142', id=1161, product='.beams.fits', bucket_name='aws-grivam', verbose=True, dryrun=False, output_path='./', get_fit_args=False, skip_existing=True):
"""
Fetch products from the Grizli AWS bucket.
Boto3 will require that you have set up your AWS credentials in, e.g.,
~/.aws/credentials
"""
import os
import boto3
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}_{1:05d}{2}'.format(root, id, product))]
if get_fit_args:
files += ['Pipeline/{0}/Extractions/fit_args.npy'.format(root)]
for file in files:
local = os.path.join(output_path, os.path.basename(file))
if verbose:
print('{0} -> {1}'.format(file, output_path))
if not dryrun:
if os.path.exists(local) & skip_existing:
continue
bkt.download_file(file, local,
ExtraArgs={"RequestPayer": "requester"})
|
"""
Automated processing of associated exposures
"""
Add script to fetch data from AWS
"""
Automated processing of associated exposures
"""
def fetch_from_AWS_bucket(root='j022644-044142', id=1161, product='.beams.fits', bucket_name='aws-grivam', verbose=True, dryrun=False, output_path='./', get_fit_args=False, skip_existing=True):
"""
Fetch products from the Grizli AWS bucket.
Boto3 will require that you have set up your AWS credentials in, e.g.,
~/.aws/credentials
"""
import os
import boto3
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}_{1:05d}{2}'.format(root, id, product))]
if get_fit_args:
files += ['Pipeline/{0}/Extractions/fit_args.npy'.format(root)]
for file in files:
local = os.path.join(output_path, os.path.basename(file))
if verbose:
print('{0} -> {1}'.format(file, output_path))
if not dryrun:
if os.path.exists(local) & skip_existing:
continue
bkt.download_file(file, local,
ExtraArgs={"RequestPayer": "requester"})
|
<commit_before>"""
Automated processing of associated exposures
"""
<commit_msg>Add script to fetch data from AWS<commit_after>"""
Automated processing of associated exposures
"""
def fetch_from_AWS_bucket(root='j022644-044142', id=1161, product='.beams.fits', bucket_name='aws-grivam', verbose=True, dryrun=False, output_path='./', get_fit_args=False, skip_existing=True):
"""
Fetch products from the Grizli AWS bucket.
Boto3 will require that you have set up your AWS credentials in, e.g.,
~/.aws/credentials
"""
import os
import boto3
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
bkt = s3.Bucket(bucket_name)
files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}_{1:05d}{2}'.format(root, id, product))]
if get_fit_args:
files += ['Pipeline/{0}/Extractions/fit_args.npy'.format(root)]
for file in files:
local = os.path.join(output_path, os.path.basename(file))
if verbose:
print('{0} -> {1}'.format(file, output_path))
if not dryrun:
if os.path.exists(local) & skip_existing:
continue
bkt.download_file(file, local,
ExtraArgs={"RequestPayer": "requester"})
|
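The two boto3 pieces doing the work above, isolated: prefix filtering on a bucket resource and a requester-pays download. A minimal sketch with placeholder names:

import boto3

s3 = boto3.resource('s3')
bkt = s3.Bucket('some-bucket')           # placeholder bucket

# Lazily iterates keys under a prefix (paginated behind the scenes).
keys = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/')]

# Requester-pays buckets reject downloads without this flag.
bkt.download_file(keys[0], 'local.fits',
                  ExtraArgs={"RequestPayer": "requester"})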
1859a5c4ae4f13b1eb8c586578909a943d15f673
|
invocations/docs.py
|
invocations/docs.py
|
import os
from invoke import ctask as task, Collection
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf %s" % build_dir)
@task(aliases=['b'])
def _browse(ctx):
ctx.run("open %s" % os.path.join(build_dir, 'index.html'))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
ctx.run("sphinx-build %s %s" % (docs_dir, build_dir), pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
|
import os
from invoke import ctask as task, Collection
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf {0}".format(ctx['sphinx.target']))
@task(aliases=['b'])
def _browse(ctx):
index = os.path.join(ctx['sphinx.target'], 'index.html')
ctx.run("open {0}".format(index))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
cmd = "sphinx-build {0} {1}".format(
ctx['sphinx.source'], ctx['sphinx.target']
)
ctx.run(cmd, pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
ns.configure({
'sphinx.source': 'docs',
# TODO: allow lazy eval so one attr can refer to another?
'sphinx.target': os.path.join('docs', '_build'),
})
|
Make use of new collection config vector
|
Make use of new collection config vector
|
Python
|
bsd-2-clause
|
alex/invocations,singingwolfboy/invocations,pyinvoke/invocations,mrjmad/invocations
|
import os
from invoke import ctask as task, Collection
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf %s" % build_dir)
@task(aliases=['b'])
def _browse(ctx):
ctx.run("open %s" % os.path.join(build_dir, 'index.html'))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
ctx.run("sphinx-build %s %s" % (docs_dir, build_dir), pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
Make use of new collection config vector
|
import os
from invoke import ctask as task, Collection
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf {0}".format(ctx['sphinx.target']))
@task(aliases=['b'])
def _browse(ctx):
index = os.path.join(ctx['sphinx.target'], 'index.html')
ctx.run("open {0}".format(index))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
cmd = "sphinx-build {0} {1}".format(
ctx['sphinx.source'], ctx['sphinx.target']
)
ctx.run(cmd, pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
ns.configure({
'sphinx.source': 'docs',
# TODO: allow lazy eval so one attr can refer to another?
'sphinx.target': os.path.join('docs', '_build'),
})
|
<commit_before>import os
from invoke import ctask as task, Collection
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf %s" % build_dir)
@task(aliases=['b'])
def _browse(ctx):
ctx.run("open %s" % os.path.join(build_dir, 'index.html'))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
ctx.run("sphinx-build %s %s" % (docs_dir, build_dir), pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
<commit_msg>Make use of new collection config vector<commit_after>
|
import os
from invoke import ctask as task, Collection
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf {0}".format(ctx['sphinx.target']))
@task(aliases=['b'])
def _browse(ctx):
index = os.path.join(ctx['sphinx.target'], 'index.html')
ctx.run("open {0}".format(index))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
cmd = "sphinx-build {0} {1}".format(
ctx['sphinx.source'], ctx['sphinx.target']
)
ctx.run(cmd, pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
ns.configure({
'sphinx.source': 'docs',
# TODO: allow lazy eval so one attr can refer to another?
'sphinx.target': os.path.join('docs', '_build'),
})
|
import os
from invoke import ctask as task, Collection
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf %s" % build_dir)
@task(aliases=['b'])
def _browse(ctx):
ctx.run("open %s" % os.path.join(build_dir, 'index.html'))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
ctx.run("sphinx-build %s %s" % (docs_dir, build_dir), pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
Make use of new collection config vectorimport os
from invoke import ctask as task, Collection
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf {0}".format(ctx['sphinx.target']))
@task(aliases=['b'])
def _browse(ctx):
index = os.path.join(ctx['sphinx.target'], 'index.html')
ctx.run("open {0}".format(index))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
cmd = "sphinx-build {0} {1}".format(
ctx['sphinx.source'], ctx['sphinx.target']
)
ctx.run(cmd, pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
ns.configure({
'sphinx.source': 'docs',
# TODO: allow lazy eval so one attr can refer to another?
'sphinx.target': os.path.join('docs', '_build'),
})
|
<commit_before>import os
from invoke import ctask as task, Collection
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf %s" % build_dir)
@task(aliases=['b'])
def _browse(ctx):
ctx.run("open %s" % os.path.join(build_dir, 'index.html'))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
ctx.run("sphinx-build %s %s" % (docs_dir, build_dir), pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
<commit_msg>Make use of new collection config vector<commit_after>import os
from invoke import ctask as task, Collection
@task(aliases=['c'])
def _clean(ctx):
ctx.run("rm -rf {0}".format(ctx['sphinx.target']))
@task(aliases=['b'])
def _browse(ctx):
index = os.path.join(ctx['sphinx.target'], 'index.html')
ctx.run("open {0}".format(index))
@task(default=True)
def build(ctx, clean=False, browse=False):
if clean:
_clean(ctx)
cmd = "sphinx-build {0} {1}".format(
ctx['sphinx.source'], ctx['sphinx.target']
)
ctx.run(cmd, pty=True)
if browse:
_browse(ctx)
ns = Collection(clean=_clean, browse=_browse, build=build)
ns.configure({
'sphinx.source': 'docs',
# TODO: allow lazy eval so one attr can refer to another?
'sphinx.target': os.path.join('docs', '_build'),
})
|
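The point of this record's diff is that the sphinx paths move from module-level constants into collection-level configuration, so they can be overridden without editing the tasks. A minimal sketch of such an override, written as a continuation of the module above (the '_html' directory is a hypothetical alternative, not something from the repo):

# hypothetical override layered on top of the defaults set by ns.configure();
# the tasks pick up the effective value at run time via ctx['sphinx.target']
ns.configure({'sphinx.target': os.path.join('docs', '_html')})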
f46059285851d47a9bee2174e32e9e084efe1182
|
jirafs/constants.py
|
jirafs/constants.py
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@adamcoddington.net>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@localhost>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
Remove my personal domain from the public jirafs git config.
|
Remove my personal domain from the public jirafs git config.
|
Python
|
mit
|
coddingtonbear/jirafs,coddingtonbear/jirafs
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@adamcoddington.net>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
Remove my personal domain from the public jirafs git config.
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@localhost>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
<commit_before>from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@adamcoddington.net>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
<commit_msg>Remove my personal domain from the public jirafs git config.<commit_after>
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@localhost>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@adamcoddington.net>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
Remove my personal domain from the public jirafs git config.from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@localhost>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
<commit_before>from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@adamcoddington.net>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
<commit_msg>Remove my personal domain from the public jirafs git config.<commit_after>from jirafs import __version__ as version
# Metadata filenames
TICKET_DETAILS = 'fields.jira'
TICKET_COMMENTS = 'comments.read_only.jira'
TICKET_NEW_COMMENT = 'new_comment.jira'
TICKET_LINKS = 'links.jira'
TICKET_FILE_FIELD_TEMPLATE = u'{field_name}.jira'
# Generic settings
LOCAL_ONLY_FILE = '.jirafs_local'
REMOTE_IGNORE_FILE = '.jirafs_remote_ignore'
GIT_IGNORE_FILE_PARTIAL = '.jirafs_ignore'
GIT_IGNORE_FILE = '.jirafs/combined_ignore'
GIT_EXCLUDE_FILE = '.jirafs/git/info/exclude'
TICKET_OPERATION_LOG = 'operation.log'
METADATA_DIR = '.jirafs'
GLOBAL_CONFIG = '.jirafs_config'
GIT_AUTHOR = 'Jirafs %s <jirafs@localhost>' % (
version
)
# Config sections
CONFIG_JIRA = 'jira'
CONFIG_PLUGINS = 'plugins'
NO_DETAIL_FIELDS = [
'comment',
'watches',
'attachment'
]
FILE_FIELDS = [
'description',
]
FILE_FIELD_BLACKLIST = [
'new_comment',
'fields',
'links',
]
CURRENT_REPO_VERSION = 16
|
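Since GIT_AUTHOR is built with plain %-formatting, the observable effect of this record's one-line diff is just the rendered author string; an interpreter sketch with a made-up version number:

>>> 'Jirafs %s <jirafs@localhost>' % '2.0.0'   # version value is hypothetical
'Jirafs 2.0.0 <jirafs@localhost>'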
1690c1981614e20183d33de4d117af0aa62ae9c5
|
kboard/board/urls.py
|
kboard/board/urls.py
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-\w]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-\w]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
Modify board_slug in url regex to pass numeric letter
|
Modify board_slug in url regex to pass numeric letter
|
Python
|
mit
|
kboard/kboard,guswnsxodlf/k-board,kboard/kboard,cjh5414/kboard,hyesun03/k-board,cjh5414/kboard,hyesun03/k-board,guswnsxodlf/k-board,kboard/kboard,hyesun03/k-board,darjeeling/k-board,cjh5414/kboard,guswnsxodlf/k-board
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-\w]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-\w]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
Modify board_slug in url regex to pass numeric letter
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
<commit_before># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-\w]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-\w]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
<commit_msg>Modify board_slug in url regex to pass numeric letter<commit_after>
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-\w]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-\w]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
Modify board_slug in url regex to pass numeric letter# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
<commit_before># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-\w]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-\w]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-\w]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
<commit_msg>Modify board_slug in url regex to pass numeric letter<commit_after># Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<board_slug>[-a-z]+)/(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/delete/$', views.delete_comment, name='delete_comment'),
]
|
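The regex change narrows board_slug from [-\w]+ (which also matches digits and underscores) to [-a-z]+, so purely numeric paths are no longer captured as board slugs and can be handled by the \d+ post_id patterns instead. A standalone sketch of the difference:

import re

old = r'^(?P<board_slug>[-\w]+)/$'
new = r'^(?P<board_slug>[-a-z]+)/$'
print(bool(re.match(old, '42/')))  # True  -- '42' is swallowed as a board slug
print(bool(re.match(new, '42/')))  # False -- numeric path falls through to other url rules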
ff34a0b9ffc3fed7be9d30d65f9e8f0c24a3cf83
|
abusehelper/contrib/spamhaus/xbl.py
|
abusehelper/contrib/spamhaus/xbl.py
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
entries = []
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
entries.append(line)
self.log.info("Read %d entries" % len(entries))
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
for entry in entries:
event = events.Event()
event.add("ip", entry)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + entry)
yield idiokit.send(event)
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
event = events.Event()
event.add("ip", line)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + line)
yield idiokit.send(event)
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
Make the bot save memory by sending events as soon as it reads through the corresponding lines of the input file.
|
Make the bot save memory by sending events as soon as it reads through the corresponding lines of the input file.
|
Python
|
mit
|
abusesa/abusehelper
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
entries = []
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
entries.append(line)
self.log.info("Read %d entries" % len(entries))
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
for entry in entries:
event = events.Event()
event.add("ip", entry)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + entry)
yield idiokit.send(event)
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
Make the bot save memory by sending events as soon as it reads through the corresponding lines of the input file.
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
event = events.Event()
event.add("ip", line)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + line)
yield idiokit.send(event)
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
<commit_before>"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
entries = []
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
entries.append(line)
self.log.info("Read %d entries" % len(entries))
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
for entry in entries:
event = events.Event()
event.add("ip", entry)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + entry)
yield idiokit.send(event)
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
<commit_msg>Make the bot save memory by sending events as soon as it reads through the corresponding lines of the input file.<commit_after>
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
event = events.Event()
event.add("ip", line)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + line)
yield idiokit.send(event)
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
entries = []
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
entries.append(line)
self.log.info("Read %d entries" % len(entries))
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
for entry in entries:
event = events.Event()
event.add("ip", entry)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + entry)
yield idiokit.send(event)
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
Make the bot save memory by sending events as soon as it reads through the corresponding lines of the input file."""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
event = events.Event()
event.add("ip", line)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + line)
yield idiokit.send(event)
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
<commit_before>"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
entries = []
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
entries.append(line)
self.log.info("Read %d entries" % len(entries))
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
for entry in entries:
event = events.Event()
event.add("ip", entry)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + entry)
yield idiokit.send(event)
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
<commit_msg>Make the bot save memory by sending events as soon as it reads through the corresponding lines of the input file.<commit_after>"""
Spamhaus XBL list handler.
Maintainer: Sauli Pahlman <sauli@codenomicon.com>
"""
import idiokit
from abusehelper.core import cymruwhois, bot, events
class SpamhausXblBot(bot.PollingBot):
xbl_filepath = bot.Param("Filename of Spamhaus XBL file")
@idiokit.stream
def poll(self):
skip_chars = ["#", ":", "$"]
self.log.info("Opening %s" % self.xbl_filepath)
try:
with open(self.xbl_filepath, "r") as f:
for line in f:
line = line.strip()
if line and line[0] in skip_chars:
continue
event = events.Event()
event.add("ip", line)
event.add("description url", "http://www.spamhaus.org/query/bl?ip=" + line)
yield idiokit.send(event)
except IOError, ioe:
self.log.error("Could not open %s: %s" % (self.xbl_filepath, ioe))
if __name__ == "__main__":
SpamhausXblBot.from_command_line().execute()
|
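The memory saving in this record comes from replacing list accumulation with a per-line yield. The same pattern, stripped of the bot and idiokit machinery (function and parameter names here are illustrative, not from the repo):

def stream_entries(path, skip_chars=('#', ':', '$')):
    # yield entries one at a time instead of building a list first,
    # so memory use stays flat regardless of the size of the XBL file
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line and line[0] in skip_chars:
                continue
            yield line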
0d73cc1b38703653c3302d8f9ff4efbeaaa2b406
|
credentials/apps/records/models.py
|
credentials/apps/records/models.py
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
Revert early removal of certificate field
|
Revert early removal of certificate field
|
Python
|
agpl-3.0
|
edx/credentials,edx/credentials,edx/credentials,edx/credentials
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
Revert early removal of certificate field
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
<commit_before>"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
<commit_msg>Revert early removal of certificate field<commit_after>
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
Revert early removal of certificate field"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
<commit_before>"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
<commit_msg>Revert early removal of certificate field<commit_after>"""
Models for the records app.
"""
import uuid
from django.db import models
from django_extensions.db.models import TimeStampedModel
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.models import User
from credentials.apps.credentials.models import ProgramCertificate
class UserGrade(TimeStampedModel):
"""
A grade for a specific user and course run
"""
username = models.CharField(max_length=150, blank=False)
course_run = models.ForeignKey(CourseRun)
letter_grade = models.CharField(max_length=255, blank=True)
percent_grade = models.DecimalField(max_digits=5, decimal_places=4, null=False)
verified = models.BooleanField(verbose_name='Verified Learner ID', default=True)
class Meta(object):
unique_together = ('username', 'course_run')
class ProgramCertRecord(TimeStampedModel):
"""
Connects a User with a Program
"""
certificate = models.ForeignKey(ProgramCertificate, null=True)
program = models.ForeignKey(Program, null=True)
user = models.ForeignKey(User)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
def __str__(self):
return 'ProgramCertificateRecord: {uuid}'.format(uuid=self.uuid)
class Meta(object):
verbose_name = "A viewable record of a program"
|
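Re-adding the nullable certificate foreign key is a schema change, so in practice the commit would be paired with a generated migration roughly of this shape (everything below is illustrative and guessed from the model, not copied from the repo; the ForeignKey signature matches the pre-on_delete Django style used in the record):

from django.db import migrations, models

# hypothetical auto-generated operation restoring the field
operation = migrations.AddField(
    model_name='programcertrecord',
    name='certificate',
    field=models.ForeignKey(null=True, to='credentials.ProgramCertificate'),
)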
14482126c8d26e4d822a55d525ff276953adbaff
|
src/som/primitives/symbol_primitives.py
|
src/som/primitives/symbol_primitives.py
|
from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 == op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
|
from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 is op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
|
Fix Symbol equality to be reference equal
|
Fix Symbol equality to be reference equal
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
Python
|
mit
|
smarr/PySOM,SOM-st/PySOM,SOM-st/RPySOM,SOM-st/PySOM,SOM-st/RPySOM,smarr/PySOM
|
from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 == op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
Fix Symbol equality to be reference equal
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 is op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
|
<commit_before>from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 == op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
<commit_msg>Fix Symbol equality to be reference equal
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>
|
from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 is op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
|
from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 == op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
Fix Symbol equality to be reference equal
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 is op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
|
<commit_before>from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 == op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
<commit_msg>Fix Symbol equality to be reference equal
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>from som.primitives.primitives import Primitives
from som.vmobjects.primitive import Primitive
def _asString(ivkbl, frame, interpreter):
rcvr = frame.pop()
frame.push(interpreter.get_universe().new_string(rcvr.get_embedded_string()))
def _equals(ivkbl, frame, interpreter):
op1 = frame.pop()
op2 = frame.pop() # rcvr
universe = interpreter.get_universe()
if op1 is op2:
frame.push(universe.trueObject)
else:
frame.push(universe.falseObject)
class SymbolPrimitives(Primitives):
def install_primitives(self):
self._install_instance_primitive(Primitive("asString", self._universe,
_asString))
self._install_instance_primitive(Primitive("=", self._universe, _equals), False)
|
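The one-token diff (== to is) switches the primitive from value equality to reference equality, which only matters when two distinct objects compare equal. A toy illustration in plain Python (this Sym class is not SOM's symbol type):

class Sym(object):
    def __init__(self, name):
        self.name = name
    def __eq__(self, other):
        return self.name == other.name

a, b = Sym('foo'), Sym('foo')
print(a == b)   # True  -- value equality via __eq__
print(a is b)   # False -- two distinct objects, so reference equality fails;
                # interned symbols are unique, which is what the fix relies on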
efd44be24e84a35db353ac79dae7cc7392a18b0c
|
matador/commands/deploy_ticket.py
|
matador/commands/deploy_ticket.py
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
Add ticket and branch arguments
|
Add ticket and branch arguments
|
Python
|
mit
|
Empiria/matador
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
Add ticket and branch arguments
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
<commit_before>#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
<commit_msg>Add ticket and branch arguments<commit_after>
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
Add ticket and branch arguments#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
<commit_before>#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project)
self._checkout_ticket(project, 'test')
<commit_msg>Add ticket and branch arguments<commit_after>#!/usr/bin/env python
from .command import Command
from matador import utils
import subprocess
import os
class DeployTicket(Command):
def _add_arguments(self, parser):
parser.prog = 'matador deploy-ticket'
parser.add_argument(
'-e', '--environment',
type=str,
required=True,
help='Agresso environment name')
parser.add_argument(
'-t', '--ticket',
type=str,
required=True,
help='Ticket name')
parser.add_argument(
'-b', '--branch',
type=str,
default='master',
help='Branch name')
parser.add_argument(
'-', '--package',
type=bool,
default=False,
help='Agresso environment name')
def _checkout_ticket(self, project, ticket, branch='master'):
repo_folder = utils.matador_repository_folder(project)
subprocess.run([
'git', '-C', repo_folder, 'checkout', branch],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w'))
def _execute(self):
project = utils.project()
if not self.args.package:
utils.update_repository(project, self.args.branch)
self._checkout_ticket(project, self.args.ticket)
|
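A minimal, standalone sketch of the argparse pattern the commit above introduces: one required option plus one option with a default. The parser name and values below are illustrative, not part of matador. Two quirks in the record itself are worth flagging: the bare '-' short option on --package looks accidental, and type=bool is a known argparse trap (bool() of any non-empty string, including "False", is True; action='store_true' is the conventional flag idiom).
import argparse

parser = argparse.ArgumentParser(prog='deploy-ticket-demo')
parser.add_argument(
    '-t', '--ticket',
    type=str,
    required=True,
    help='Ticket name')
parser.add_argument(
    '-b', '--branch',
    type=str,
    default='master',
    help='Branch name')

# Required option supplied, defaulted option omitted:
args = parser.parse_args(['-t', 'TKT-123'])
print(args.ticket, args.branch)  # -> TKT-123 master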
6795e02c14fa99da2c0812fe6694bbd503f89ad1
|
tests/mock_vws/test_invalid_given_id.py
|
tests/mock_vws/test_invalid_given_id.py
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
any_endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
endpoint = any_endpoint
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
Use any_endpoint on invalid id test
|
Use any_endpoint on invalid id test
|
Python
|
mit
|
adamtheturtle/vws-python,adamtheturtle/vws-python
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
Use any_endpoint on invalid id test
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
any_endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
endpoint = any_endpoint
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
<commit_before>"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
<commit_msg>Use any_endpoint on invalid id test<commit_after>
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
any_endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
endpoint = any_endpoint
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
Use any_endpoint on invalid id test"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
any_endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
endpoint = any_endpoint
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
<commit_before>"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
<commit_msg>Use any_endpoint on invalid id test<commit_after>"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import pytest
import requests
from requests import codes
from mock_vws._constants import ResultCodes
from tests.mock_vws.utils import (
TargetAPIEndpoint,
VuforiaDatabaseKeys,
assert_vws_failure,
delete_target,
)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(
self,
vuforia_database_keys: VuforiaDatabaseKeys,
any_endpoint: TargetAPIEndpoint,
target_id: str,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
endpoint = any_endpoint
if not endpoint.prepared_request.path_url.endswith(target_id):
return
delete_target(
vuforia_database_keys=vuforia_database_keys,
target_id=target_id,
)
session = requests.Session()
response = session.send( # type: ignore
request=endpoint.prepared_request,
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
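A runnable pytest sketch of the fixture-renaming pattern in the diff above: the test now requests a fixture named any_endpoint and immediately aliases it to a local endpoint variable so the rest of the body stays unchanged. The fixture body and URL below are made up for illustration; the real fixture is presumably defined elsewhere in the project's test suite.
import pytest

@pytest.fixture
def any_endpoint():
    # Stand-in for the real fixture; just something ending in a target ID.
    return 'https://vws.example.test/targets/abc123'

def test_uses_alias(any_endpoint):
    endpoint = any_endpoint  # local alias keeps the existing body unchanged
    assert endpoint.endswith('abc123')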
4eda3f3535d28e2486745f33504c417ba6837c3a
|
stdnum/nz/__init__.py
|
stdnum/nz/__init__.py
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
Add missing vat alias for New Zealand
|
Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202
|
Python
|
lgpl-2.1
|
arthurdejong/python-stdnum,arthurdejong/python-stdnum,arthurdejong/python-stdnum
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
<commit_before># __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
<commit_msg>Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202<commit_after>
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202# __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
<commit_before># __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
<commit_msg>Add missing vat alias for New Zealand
Closes https://github.com/arthurdejong/python-stdnum/pull/202<commit_after># __init__.py - collection of New Zealand numbers
# coding: utf-8
#
# Copyright (C) 2019 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of New Zealand numbers."""
# provide aliases
from stdnum.nz import ird as vat # noqa: F401
|
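The stdnum change above re-exports a submodule under a second name using plain "import ... as ...", with "# noqa: F401" silencing flake8's unused-import warning. The alias is just a second binding to the same module object, which the sketch below demonstrates with a stdlib module so it runs anywhere; stdnum's ird/vat pair behaves the same way.
import json
import json as vat  # one module object, two names

assert vat is json
print(vat.dumps({'alias': 'works'}))  # -> {"alias": "works"}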
9e75c0b90d15222af3089d2362c28be726861558
|
nbresuse/handlers.py
|
nbresuse/handlers.py
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('LIMIT_MEM', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('MEM_LIMIT', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
Use final Memory limit env variable name
|
Use final Memory limit env variable name
|
Python
|
bsd-2-clause
|
allanlwu/allangdrive,yuvipanda/nbresuse,allanlwu/allangdrive,yuvipanda/nbresuse
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('LIMIT_MEM', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
Use final Memory limit env variable name
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('MEM_LIMIT', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
<commit_before>import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('LIMIT_MEM', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
<commit_msg>Use final Memory limit env variable name<commit_after>
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('MEM_LIMIT', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('LIMIT_MEM', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
Use final Memory limit env variable nameimport os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('MEM_LIMIT', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
<commit_before>import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('LIMIT_MEM', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
<commit_msg>Use final Memory limit env variable name<commit_after>import os
import json
import psutil
from notebook.utils import url_path_join
from notebook.base.handlers import IPythonHandler
def get_metrics():
cur_process = psutil.Process()
all_processes = [cur_process] + cur_process.children(recursive=True)
rss = sum([p.memory_info().rss for p in all_processes])
return {
'rss': rss,
'limits': {
'memory': int(os.environ.get('MEM_LIMIT', None))
}
}
class MetricsHandler(IPythonHandler):
def get(self):
self.finish(json.dumps(get_metrics()))
def setup_handlers(web_app):
route_pattern = url_path_join(web_app.settings['base_url'], '/metrics')
web_app.add_handlers('.*', [(route_pattern, MetricsHandler)])
|
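A standalone sketch of the two techniques in the handler above: summing RSS across a process and all its descendants with the third-party psutil package, and reading a limit from the environment. Note that int(os.environ.get('MEM_LIMIT', None)) raises TypeError when the variable is unset, because int(None) is invalid; the guarded read below avoids that.
import os
import psutil

def total_rss():
    """Sum resident set size over this process and all its descendants."""
    cur_process = psutil.Process()
    all_processes = [cur_process] + cur_process.children(recursive=True)
    return sum(p.memory_info().rss for p in all_processes)

raw = os.environ.get('MEM_LIMIT')  # None when the variable is unset
mem_limit = int(raw) if raw is not None else None
print({'rss': total_rss(), 'limits': {'memory': mem_limit}})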
ea3a72443f2fa841ea0bc73ec461968c447f39c1
|
egg_timer/apps/utils/management/commands/check_requirements.py
|
egg_timer/apps/utils/management/commands/check_requirements.py
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
Revert "Added a prod option to the rquirements checker"
|
Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.
|
Python
|
mit
|
jessamynsmith/eggtimer-server,jessamynsmith/eggtimer-server,jessamynsmith/eggtimer-server,jessamynsmith/eggtimer-server
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
<commit_before>import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
<commit_msg>Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.<commit_after>
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
<commit_before>import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def _get_file_contents(self, name):
req_file = open('requirements/%s.txt' % name)
reqs = req_file.read()
req_file.close()
req_list = reqs.split('\n')
if req_list[0].startswith('-r'):
req_list = req_list[1:]
return req_list
def handle(self, *args, **options):
check_prod = False
if len(args) == 1:
if args[0] == 'prod':
check_prod = True
else:
print "Unrecognized option %s; defaulting to checking dev requirements." % args[0]
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
req_list = self._get_file_contents('common')
if check_prod:
req_list.extend(self._get_file_contents('prod'))
else:
req_list.extend(self._get_file_contents('dev'))
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
for req_item in req_list:
if req_item not in freeze_results:
print "Required item is not installed: %s" % req_item
<commit_msg>Revert "Added a prod option to the rquirements checker"
This reverts commit 5b9ae76d157d068ef456d5caa5c4352a139f528b.<commit_after>import subprocess
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = 'Ensure that all installed packages are in requirements.txt'
def handle(self, *args, **options):
proc = subprocess.Popen(['pip', 'freeze'], stdout=subprocess.PIPE)
freeze_results = proc.communicate()[0].split('\n')
common_file = open('requirements/common.txt')
reqs = common_file.read()
common_file.close()
req_list = reqs.split('\n')
dev_file = open('requirements/dev.txt')
reqs = dev_file.read()
dev_file.close()
req_list.extend(reqs.split('\n')[1:])
sorted(freeze_results)
sorted(req_list)
for freeze_item in freeze_results:
if freeze_item not in req_list:
print "Item is missing from requirements files: %s" % freeze_item
|
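A Python 3 sketch of the requirements check the reverted command performs (the original is Python 2, hence the bare print statements). Two details differ deliberately: set difference replaces the linear membership scans, and it sidesteps the original's no-op sorted(freeze_results) calls, whose return values were discarded (sorted builds a new list rather than sorting in place). The requirements path below is illustrative.
import subprocess

result = subprocess.run(
    ['pip', 'freeze'], capture_output=True, text=True, check=True)
frozen = set(result.stdout.splitlines())

with open('requirements.txt') as req_file:  # illustrative path
    required = {line.strip() for line in req_file
                if line.strip() and not line.startswith('-r')}

for item in sorted(frozen - required):
    print('Item is missing from requirements files: %s' % item)
for item in sorted(required - frozen):
    print('Required item is not installed: %s' % item)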
8014285e5dc8fb13377b729f9fd19b4187fbaf29
|
fireplace/carddata/spells/other.py
|
fireplace/carddata/spells/other.py
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
# RFG
# Adrenaline Rush
class NEW1_006(Card):
action = drawCard
combo = drawCards(2)
|
Implement Adrenaline Rush why not
|
Implement Adrenaline Rush why not
|
Python
|
agpl-3.0
|
Ragowit/fireplace,butozerca/fireplace,Meerkov/fireplace,smallnamespace/fireplace,amw2104/fireplace,liujimj/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,oftc-ftw/fireplace,butozerca/fireplace,NightKev/fireplace,jleclanche/fireplace,liujimj/fireplace,Meerkov/fireplace,beheh/fireplace,smallnamespace/fireplace,amw2104/fireplace
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
Implement Adrenaline Rush why not
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
# RFG
# Adrenaline Rush
class NEW1_006(Card):
action = drawCard
combo = drawCards(2)
|
<commit_before>from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
<commit_msg>Implement Adrenaline Rush why not<commit_after>
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
# RFG
# Adrenaline Rush
class NEW1_006(Card):
action = drawCard
combo = drawCards(2)
|
from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
Implement Adrenaline Rush why notfrom ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
# RFG
# Adrenaline Rush
class NEW1_006(Card):
action = drawCard
combo = drawCards(2)
|
<commit_before>from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
<commit_msg>Implement Adrenaline Rush why not<commit_after>from ..card import *
# The Coin
class GAME_005(Card):
def action(self):
self.controller.tempMana += 1
# RFG
# Adrenaline Rush
class NEW1_006(Card):
action = drawCard
combo = drawCards(2)
|
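A minimal, self-contained sketch of the card-definition pattern in the fireplace diff above: behaviour is attached as class attributes, and a helper such as drawCards(n) returns a callable. The player model and helper names below are stand-ins; fireplace's real Card API is considerably richer.
def draw_cards(n):
    """Return an action that moves n cards from a player's deck to hand."""
    def action(player):
        for _ in range(n):
            player['hand'].append(player['deck'].pop())
    return action

draw_card = draw_cards(1)

class AdrenalineRushDemo:
    action = staticmethod(draw_card)     # normal cast: draw one card
    combo = staticmethod(draw_cards(2))  # combo cast: draw two

player = {'deck': list(range(10)), 'hand': []}
AdrenalineRushDemo.combo(player)
print(player['hand'])  # -> [9, 8]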
9410ceb83d85d70a484bbf08ecc274216fa0589f
|
mythril/support/source_support.py
|
mythril/support/source_support.py
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
"""Class to handle to source data"""
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
"""
:param source_type: whether it is a solidity-file or evm-bytecode
:param source_format: whether it is bytecode, ethereum-address or text
:param source_list: List of files
:param meta: meta data
"""
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
"""
get the source data from the contracts list
:param contracts: the list of contracts
:return:
"""
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
Add documentation for Source class
|
Add documentation for Source class
|
Python
|
mit
|
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
Add documentation for Source class
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
"""Class to handle to source data"""
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
"""
:param source_type: whether it is a solidity-file or evm-bytecode
:param source_format: whether it is bytecode, ethereum-address or text
:param source_list: List of files
:param meta: meta data
"""
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
"""
get the source data from the contracts list
:param contracts: the list of contracts
:return:
"""
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
<commit_before>from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
<commit_msg>Add documentation for Source class<commit_after>
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
"""Class to handle to source data"""
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
"""
:param source_type: whether it is a solidity-file or evm-bytecode
:param source_format: whether it is bytecode, ethereum-address or text
:param source_list: List of files
:param meta: meta data
"""
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
"""
get the source data from the contracts list
:param contracts: the list of contracts
:return:
"""
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
Add documentation for Source classfrom mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
"""Class to handle to source data"""
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
"""
:param source_type: whether it is a solidity-file or evm-bytecode
:param source_format: whether it is bytecode, ethereum-address or text
:param source_list: List of files
:param meta: meta data
"""
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
"""
get the source data from the contracts list
:param contracts: the list of contracts
:return:
"""
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
<commit_before>from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
<commit_msg>Add documentation for Source class<commit_after>from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
"""Class to handle to source data"""
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
"""
:param source_type: whether it is a solidity-file or evm-bytecode
:param source_format: whether it is bytecode, ethereum-address or text
:param source_list: List of files
:param meta: meta data
"""
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
"""
get the source data from the contracts list
:param contracts: the list of contracts
:return:
"""
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
|
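The mythril commit above adds reST/Sphinx-style docstrings. A short sketch of that convention, a one-line class docstring plus :param: fields on __init__, with illustrative names and values:
class Source:
    """Container for source metadata."""

    def __init__(self, source_type=None, source_format=None):
        """
        :param source_type: e.g. "solidity-file" or "raw-bytecode"
        :param source_format: e.g. "text" or "evm-byzantium-bytecode"
        """
        self.source_type = source_type
        self.source_format = source_format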
db46374695aed370aa8d8a51c34043d6a48a702d
|
waldo/tests/unit/test_contrib_config.py
|
waldo/tests/unit/test_contrib_config.py
|
# pylint: disable=R0904,W0212
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
# pylint: disable=C0103,C0111,R0903,R0904,W0212,W0232
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
Add common ignore list per README
|
Add common ignore list per README
|
Python
|
apache-2.0
|
checkmate/simpl,ryandub/simpl,ziadsawalha/simpl,samstav/simpl,larsbutler/simpl
|
# pylint: disable=R0904,W0212
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
Add common ignore list per README
|
# pylint: disable=C0103,C0111,R0903,R0904,W0212,W0232
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before># pylint: disable=R0904,W0212
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add common ignore list per README<commit_after>
|
# pylint: disable=C0103,C0111,R0903,R0904,W0212,W0232
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
# pylint: disable=R0904,W0212
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
Add common ignore list per README# pylint: disable=C0103,C0111,R0903,R0904,W0212,W0232
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before># pylint: disable=R0904,W0212
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add common ignore list per README<commit_after># pylint: disable=C0103,C0111,R0903,R0904,W0212,W0232
# Copyright (c) 2011-2013 Rackspace Hosting
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for common config."""
import unittest
from waldo.contrib import config
class TestParsers(unittest.TestCase):
def test_comma_separated_strings(self):
expected = ['1', '2', '3']
result = config.comma_separated_strings("1,2,3")
self.assertItemsEqual(result, expected)
def test_format_comma_separated_pairs(self):
expected = dict(A='1', B='2', C='3')
result = config.comma_separated_pairs("A=1,B=2,C=3")
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
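The tests in this record pin down the two parsers' behavior without showing their bodies. One plausible implementation that satisfies both tests, as a sketch rather than waldo's actual code:

def comma_separated_strings(value):
    """Parse '1,2,3' into ['1', '2', '3']."""
    return [item.strip() for item in value.split(",") if item.strip()]

def comma_separated_pairs(value):
    """Parse 'A=1,B=2,C=3' into {'A': '1', 'B': '2', 'C': '3'}."""
    pairs = (item.split("=", 1) for item in value.split(",") if item.strip())
    return {key.strip(): val.strip() for key, val in pairs}

# Mirrors the assertions in the test case above.
assert comma_separated_strings("1,2,3") == ["1", "2", "3"]
assert comma_separated_pairs("A=1,B=2,C=3") == {"A": "1", "B": "2", "C": "3"}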
ad98e3c25434dc251fe6d7ace3acfe418a4d8955
|
simplekv/db/mongo.py
|
simplekv/db/mongo.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = self.db[self.collection].find({"_id": key}).next()
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = next(self.db[self.collection].find({"_id": key}))
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
Fix Python 3's lack of .next().
|
Fix Python 3's lack of .next().
|
Python
|
mit
|
mbr/simplekv,karteek/simplekv,mbr/simplekv,fmarczin/simplekv,karteek/simplekv,fmarczin/simplekv
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = self.db[self.collection].find({"_id": key}).next()
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
Fix Python 3's lack of .next().
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = next(self.db[self.collection].find({"_id": key}))
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = self.db[self.collection].find({"_id": key}).next()
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
<commit_msg>Fix Python 3's lack of .next().<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = next(self.db[self.collection].find({"_id": key}))
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = self.db[self.collection].find({"_id": key}).next()
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
Fix Python 3's lack of .next().#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = next(self.db[self.collection].find({"_id": key}))
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = self.db[self.collection].find({"_id": key}).next()
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
<commit_msg>Fix Python 3's lack of .next().<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import KeyValueStore
from .._compat import BytesIO
from .._compat import pickle
from bson.binary import Binary
class MongoStore(KeyValueStore):
"""Uses a MongoDB collection as the backend, using pickle as a serializer.
:param db: A (already authenticated) pymongo database.
:param collection: A MongoDB collection name.
"""
def __init__(self, db, collection):
self.db = db
self.collection = collection
def _has_key(self, key):
return self.db[self.collection].find({"_id": key}).count() > 0
def _delete(self, key):
return self.db[self.collection].remove({"_id": key})
def _get(self, key):
try:
item = next(self.db[self.collection].find({"_id": key}))
return pickle.loads(item["v"])
except StopIteration:
raise KeyError(key)
def _open(self, key):
return BytesIO(self._get(key))
def _put(self, key, value):
self.db[self.collection].update(
{"_id": key},
{"$set": {"v": Binary(pickle.dumps(value))}},
upsert=True)
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def iter_keys(self):
for item in self.db[self.collection].find():
yield item["_id"]
|
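The fix in this record is a one-liner with a concrete compatibility story: Python 2 cursors exposed a .next() method, while Python 3 renamed the iterator protocol method to __next__ and expects the next() built-in, which works on both. A standalone illustration:

# Python 3 renamed the iterator method: .next() became .__next__(),
# and the portable spelling on both Python 2 and 3 is the next() built-in.
it = iter([1, 2, 3])
print(next(it))        # 1 -- portable, what the commit switches to
print(it.__next__())   # 2 -- Python 3's method name
# it.next()            # AttributeError on Python 3; the spelling removed above

try:
    next(iter([]))     # exhausted iterator
except StopIteration:
    print("empty result -> StopIteration, which the store maps to KeyError")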
b61188d2a842c4bde0b5e5c14d60db86806b0502
|
pdftools/__init__.py
|
pdftools/__init__.py
|
"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "1.1.4"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
|
"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "2.0.0"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
|
Update to new major version
|
Update to new major version
|
Python
|
mit
|
MrLeeh/pdftools
|
"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "1.1.4"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
Update to new major version
|
"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "2.0.0"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
|
<commit_before>"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "1.1.4"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
<commit_msg>Update to new major version<commit_after>
|
"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "2.0.0"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
|
"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "1.1.4"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
Update to new mayor version"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "2.0.0"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
|
<commit_before>"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "1.1.4"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
<commit_msg>Update to new major version<commit_after>"""
pdftools Package.
:author: Stefan Lehmann <stlm@posteo.de>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:35:21
:last modified by: Stefan Lehmann
:last modified time: 2018-10-28 16:57:24
"""
__version__ = "2.0.0"
from .pdftools import (
pdf_merge,
pdf_rotate,
pdf_split,
pdf_zip,
pdf_insert,
pdf_remove,
pdf_add,
)
|
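This record bumps __version__ for a major release. If downstream code ever compares such version strings, plain string comparison gives wrong answers; a short sketch using the third-party packaging library (pip install packaging):

# Version strings compare wrongly as plain strings; `packaging`
# compares them semantically instead.
from packaging.version import Version

assert "10.0.0" < "2.0.0"                    # lexicographic: wrong answer
assert Version("10.0.0") > Version("2.0.0")  # semantic: right answer
assert Version("2.0.0").major == 2           # the bump above is a major one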
f87a923678f5d7e9f6390ffcb42eae6b2a0f9cc2
|
services/views.py
|
services/views.py
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
Use separate API key for feedback about app.
|
Use separate API key for feedback about app.
|
Python
|
agpl-3.0
|
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
Use separate API key for feedback about app.
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
<commit_before>import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
<commit_msg>Use separate API key for feedback about app.<commit_after>
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
Use separate API key for feedback about app.import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
<commit_before>import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
outgoing['api_key'] = settings.OPEN311['API_KEY']
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
<commit_msg>Use separate API key for feedback about app.<commit_after>import json
import requests
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from .patch_ssl import get_session
@csrf_exempt
def post_service_request(request):
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get('internal_feedback', False):
if 'internal_feedback' in outgoing:
del outgoing['internal_feedback']
api_key = settings.OPEN311['INTERNAL_FEEDBACK_API_KEY']
else:
api_key = settings.OPEN311['API_KEY']
outgoing['api_key'] = api_key
url = settings.OPEN311['URL_BASE']
session = get_session()
r = session.post(url, data=outgoing)
if r.status_code != 200:
return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")
|
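The new branch in this record reads the flag with .get() and then deletes it in a second step. dict.pop() does both at once; a possible tightening of the same logic, offered as a refactor sketch rather than the project's code:

def pick_api_key(outgoing, conf):
    """Choose the Open311 key and strip the routing flag in one step."""
    # dict.pop() reads and deletes together, with a default for payloads
    # that never set the flag. `conf` stands in for settings.OPEN311.
    if outgoing.pop('internal_feedback', False):
        return conf['INTERNAL_FEEDBACK_API_KEY']
    return conf['API_KEY']

conf = {'API_KEY': 'public-key', 'INTERNAL_FEEDBACK_API_KEY': 'internal-key'}
payload = {'description': 'broken bench', 'internal_feedback': 'true'}
print(pick_api_key(payload, conf))  # internal-key
print(payload)                      # the flag is gone from the outgoing dict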
b92c1caa8e19376c17f503de1464d4466e547cdf
|
api/base/content_negotiation.py
|
api/base/content_negotiation.py
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the first renderer in the `.renderer_classes` list.
"""
return (renderers[0], renderers[0].media_type)
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the third renderer in the `.renderer_classes` list for the browsable API,
otherwise use the first renderer which has media_type "application/vnd.api+json"
"""
if 'text/html' in request.META['HTTP_ACCEPT'] :
return (renderers[2], renderers[2].media_type)
return (renderers[0], renderers[0].media_type)
|
Select third renderer if 'text/html' in accept
|
Select third renderer if 'text/html' in accept
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,arpitar/osf.io,TomHeatwole/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,doublebits/osf.io,crcresearch/osf.io,amyshi188/osf.io,acshi/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,kwierman/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,njantrania/osf.io,RomanZWang/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,kch8qx/osf.io,kch8qx/osf.io,hmoco/osf.io,mluke93/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,leb2dg/osf.io,kch8qx/osf.io,billyhunt/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,caneruguz/osf.io,TomBaxter/osf.io,wearpants/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,felliott/osf.io,mattclark/osf.io,aaxelb/osf.io,pattisdr/osf.io,sloria/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,hmoco/osf.io,ckc6cz/osf.io,chrisseto/osf.io,mfraezz/osf.io,leb2dg/osf.io,kch8qx/osf.io,kch8qx/osf.io,billyhunt/osf.io,petermalcolm/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,jmcarp/osf.io,chennan47/osf.io,KAsante95/osf.io,GageGaskins/osf.io,jmcarp/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,erinspace/osf.io,amyshi188/osf.io,mfraezz/osf.io,GageGaskins/osf.io,mfraezz/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,samanehsan/osf.io,mattclark/osf.io,KAsante95/osf.io,adlius/osf.io,chrisseto/osf.io,kwierman/osf.io,rdhyee/osf.io,erinspace/osf.io,danielneis/osf.io,caseyrygt/osf.io,aaxelb/osf.io,mluo613/osf.io,doublebits/osf.io,Ghalko/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,abought/osf.io,njantrania/osf.io,crcresearch/osf.io,KAsante95/osf.io,Ghalko/osf.io,caseyrygt/osf.io,sbt9uc/osf.io,erinspace/osf.io,chrisseto/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,icereval/osf.io,MerlinZhang/osf.io,wearpants/osf.io,chrisseto/osf.io,danielneis/osf.io,cosenal/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,njantrania/osf.io,SSJohns/osf.io,GageGaskins/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,kwierman/osf.io,leb2dg/osf.io,samchrisinger/osf.io,caneruguz/osf.io,samanehsan/osf.io,felliott/osf.io,samanehsan/osf.io,icereval/osf.io,jnayak1/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,Ghalko/osf.io,caseyrollins/osf.io,adlius/osf.io,ckc6cz/osf.io,brianjgeiger/osf.io,wearpants/osf.io,jnayak1/osf.io,cosenal/osf.io,sbt9uc/osf.io,danielneis/osf.io,mluke93/osf.io,sloria/osf.io,Ghalko/osf.io,monikagrabowska/osf.io
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the first renderer in the `.renderer_classes` list.
"""
return (renderers[0], renderers[0].media_type)Select third renderer if 'text/html' in accept
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the third renderer in the `.renderer_classes` list for the browsable API,
otherwise use the first renderer which has media_type "application/vnd.api+json"
"""
if 'text/html' in request.META['HTTP_ACCEPT'] :
return (renderers[2], renderers[2].media_type)
return (renderers[0], renderers[0].media_type)
|
<commit_before>from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the first renderer in the `.renderer_classes` list.
"""
return (renderers[0], renderers[0].media_type)<commit_msg>Select third renderer if 'text/html' in accept<commit_after>
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the third renderer in the `.renderer_classes` list for the browsable API,
otherwise use the first renderer which has media_type "application/vnd.api+json"
"""
if 'text/html' in request.META['HTTP_ACCEPT'] :
return (renderers[2], renderers[2].media_type)
return (renderers[0], renderers[0].media_type)
|
from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the first renderer in the `.renderer_classes` list.
"""
return (renderers[0], renderers[0].media_type)Select third renderer if 'text/html' in acceptfrom rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the third renderer in the `.renderer_classes` list for the browsable API,
otherwise use the first renderer which has media_type "application/vnd.api+json"
"""
if 'text/html' in request.META['HTTP_ACCEPT'] :
return (renderers[2], renderers[2].media_type)
return (renderers[0], renderers[0].media_type)
|
<commit_before>from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the first renderer in the `.renderer_classes` list.
"""
return (renderers[0], renderers[0].media_type)<commit_msg>Select third renderer if 'text/html' in accept<commit_after>from rest_framework.negotiation import BaseContentNegotiation
class CustomClientContentNegotiation(BaseContentNegotiation):
def select_parser(self, request, parsers):
"""
Select the first parser in the `.parser_classes` list.
"""
return parsers[0]
def select_renderer(self, request, renderers, format_suffix):
"""
Select the third renderer in the `.renderer_classes` list for the browsable API,
otherwise use the first renderer which has media_type "application/vnd.api+json"
"""
if 'text/html' in request.META['HTTP_ACCEPT'] :
return (renderers[2], renderers[2].media_type)
return (renderers[0], renderers[0].media_type)
|
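Indexing renderers[2] ties the negotiation class to the exact ordering of renderer_classes, and request.META['HTTP_ACCEPT'] raises a KeyError when a client sends no Accept header. A more defensive variant of the same DRF hook matches on media type instead; this is a sketch under the assumption that a text/html (browsable API) renderer is registered, not the project's actual code:

from rest_framework.negotiation import BaseContentNegotiation

class MediaTypeAwareNegotiation(BaseContentNegotiation):
    def select_parser(self, request, parsers):
        return parsers[0]

    def select_renderer(self, request, renderers, format_suffix):
        # .get() tolerates requests without an Accept header.
        accept = request.META.get('HTTP_ACCEPT', '')
        if 'text/html' in accept:
            # Find the browsable-API renderer by media type rather than
            # by its position in renderer_classes.
            for renderer in renderers:
                if renderer.media_type == 'text/html':
                    return renderer, renderer.media_type
        return renderers[0], renderers[0].media_type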
69de2261c30a8bab1ac4d0749cf32baec49e0cc4
|
webapp/byceps/blueprints/board/views.py
|
webapp/byceps/blueprints/board/views.py
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
Throw 404 if category/topic with given id is not found.
|
Throw 404 if category/topic with given id is not found.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
Throw 404 if category/topic with given id is not found.
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
<commit_msg>Throw 404 if category/topic with given id is not found.<commit_after>
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
Throw 404 if category/topic with given id is not found.# -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
<commit_before># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
<commit_msg>Throw 404 if category/topic with given id is not found.<commit_after># -*- coding: utf-8 -*-
"""
byceps.blueprints.board.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2014 Jochen Kupperschmidt
"""
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
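Editor's aside: the diff in the record above swaps SQLAlchemy's Query.get for Flask-SQLAlchemy's Query.get_or_404, which aborts the request with an HTTP 404 instead of handing the template a None object. Below is a minimal, self-contained sketch of the same pattern, assuming Flask and Flask-SQLAlchemy are installed; the Topic model and route are illustrative stand-ins, not byceps code.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'  # illustrative in-memory DB
db = SQLAlchemy(app)

class Topic(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(80))

with app.app_context():
    db.create_all()  # create the illustrative table

@app.route('/topics/<int:topic_id>')
def topic_view(topic_id):
    # get_or_404 raises werkzeug's NotFound (rendered as a 404 response)
    # when no row matches, instead of returning None.
    topic = Topic.query.get_or_404(topic_id)
    return {'id': topic.id, 'title': topic.title}

The design point is that the 404 is raised at lookup time, so every view gets consistent not-found behaviour without an explicit `if topic is None` check.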
b890c9046d36687a65d46be724cfaa8726417b5d
|
selectable/tests/runtests.py
|
selectable/tests/runtests.py
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
SITE_ID=1,
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Add SITE_ID to test settings setup for Django 1.3.
|
Add SITE_ID to test settings setup for Django 1.3.
|
Python
|
bsd-2-clause
|
mlavin/django-selectable,affan2/django-selectable,makinacorpus/django-selectable,makinacorpus/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
Add SITE_ID to test settings setup for Django 1.3.
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
SITE_ID=1,
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
<commit_msg>Add SITE_ID to test settings setup for Django 1.3.<commit_after>
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
SITE_ID=1,
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
Add SITE_ID to test settings setup for Django 1.3.#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
SITE_ID=1,
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
<commit_msg>Add SITE_ID to test settings setup for Django 1.3.<commit_after>#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'selectable',
),
SITE_ID=1,
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
test_args = ['selectable']
parent = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", )
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
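Editor's aside: the one-line fix in the record above adds SITE_ID=1 because django.contrib.sites sits in INSTALLED_APPS, and Site.objects.get_current() reads settings.SITE_ID, raising ImproperlyConfigured when it is absent; the commit message ties the failure to running the test suite under Django 1.3. A stripped-down sketch of the working configuration (hypothetical, not part of django-selectable):

from django.conf import settings

settings.configure(
    DATABASES={'default': {'ENGINE': 'django.db.backends.sqlite3',
                           'NAME': ':memory:'}},
    INSTALLED_APPS=('django.contrib.contenttypes',
                    'django.contrib.sites'),
    SITE_ID=1,  # omit this and anything touching the sites framework errors out
)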
2d3b899011c79324195a36aaf3bd53dae6abe961
|
seleniumrequests/__init__.py
|
seleniumrequests/__init__.py
|
from selenium.webdriver import Firefox, Chrome, Ie, Edge, Opera, Safari, BlackBerry, PhantomJS, Android, Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, Firefox):
pass
class Chrome(RequestsSessionMixin, Chrome):
pass
class Ie(RequestsSessionMixin, Ie):
pass
class Edge(RequestsSessionMixin, Edge):
pass
class Opera(RequestsSessionMixin, Opera):
pass
class Safari(RequestsSessionMixin, Safari):
pass
class BlackBerry(RequestsSessionMixin, BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, PhantomJS):
pass
class Android(RequestsSessionMixin, Android):
pass
class Remote(RequestsSessionMixin, Remote):
pass
|
from selenium.webdriver import _Firefox, _Chrome, _Ie, _Edge, _Opera, _Safari, _BlackBerry, _PhantomJS, _Android, \
_Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, _Firefox):
pass
class Chrome(RequestsSessionMixin, _Chrome):
pass
class Ie(RequestsSessionMixin, _Ie):
pass
class Edge(RequestsSessionMixin, _Edge):
pass
class Opera(RequestsSessionMixin, _Opera):
pass
class Safari(RequestsSessionMixin, _Safari):
pass
class BlackBerry(RequestsSessionMixin, _BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, _PhantomJS):
pass
class Android(RequestsSessionMixin, _Android):
pass
class Remote(RequestsSessionMixin, _Remote):
pass
|
Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"
|
Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"
|
Python
|
mit
|
cryzed/Selenium-Requests
|
from selenium.webdriver import Firefox, Chrome, Ie, Edge, Opera, Safari, BlackBerry, PhantomJS, Android, Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, Firefox):
pass
class Chrome(RequestsSessionMixin, Chrome):
pass
class Ie(RequestsSessionMixin, Ie):
pass
class Edge(RequestsSessionMixin, Edge):
pass
class Opera(RequestsSessionMixin, Opera):
pass
class Safari(RequestsSessionMixin, Safari):
pass
class BlackBerry(RequestsSessionMixin, BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, PhantomJS):
pass
class Android(RequestsSessionMixin, Android):
pass
class Remote(RequestsSessionMixin, Remote):
pass
Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"
|
from selenium.webdriver import _Firefox, _Chrome, _Ie, _Edge, _Opera, _Safari, _BlackBerry, _PhantomJS, _Android, \
_Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, _Firefox):
pass
class Chrome(RequestsSessionMixin, _Chrome):
pass
class Ie(RequestsSessionMixin, _Ie):
pass
class Edge(RequestsSessionMixin, _Edge):
pass
class Opera(RequestsSessionMixin, _Opera):
pass
class Safari(RequestsSessionMixin, _Safari):
pass
class BlackBerry(RequestsSessionMixin, _BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, _PhantomJS):
pass
class Android(RequestsSessionMixin, _Android):
pass
class Remote(RequestsSessionMixin, _Remote):
pass
|
<commit_before>from selenium.webdriver import Firefox, Chrome, Ie, Edge, Opera, Safari, BlackBerry, PhantomJS, Android, Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, Firefox):
pass
class Chrome(RequestsSessionMixin, Chrome):
pass
class Ie(RequestsSessionMixin, Ie):
pass
class Edge(RequestsSessionMixin, Edge):
pass
class Opera(RequestsSessionMixin, Opera):
pass
class Safari(RequestsSessionMixin, Safari):
pass
class BlackBerry(RequestsSessionMixin, BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, PhantomJS):
pass
class Android(RequestsSessionMixin, Android):
pass
class Remote(RequestsSessionMixin, Remote):
pass
<commit_msg>Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"<commit_after>
|
from selenium.webdriver import _Firefox, _Chrome, _Ie, _Edge, _Opera, _Safari, _BlackBerry, _PhantomJS, _Android, \
_Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, _Firefox):
pass
class Chrome(RequestsSessionMixin, _Chrome):
pass
class Ie(RequestsSessionMixin, _Ie):
pass
class Edge(RequestsSessionMixin, _Edge):
pass
class Opera(RequestsSessionMixin, _Opera):
pass
class Safari(RequestsSessionMixin, _Safari):
pass
class BlackBerry(RequestsSessionMixin, _BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, _PhantomJS):
pass
class Android(RequestsSessionMixin, _Android):
pass
class Remote(RequestsSessionMixin, _Remote):
pass
|
from selenium.webdriver import Firefox, Chrome, Ie, Edge, Opera, Safari, BlackBerry, PhantomJS, Android, Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, Firefox):
pass
class Chrome(RequestsSessionMixin, Chrome):
pass
class Ie(RequestsSessionMixin, Ie):
pass
class Edge(RequestsSessionMixin, Edge):
pass
class Opera(RequestsSessionMixin, Opera):
pass
class Safari(RequestsSessionMixin, Safari):
pass
class BlackBerry(RequestsSessionMixin, BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, PhantomJS):
pass
class Android(RequestsSessionMixin, Android):
pass
class Remote(RequestsSessionMixin, Remote):
pass
Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"from selenium.webdriver import _Firefox, _Chrome, _Ie, _Edge, _Opera, _Safari, _BlackBerry, _PhantomJS, _Android, \
_Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, _Firefox):
pass
class Chrome(RequestsSessionMixin, _Chrome):
pass
class Ie(RequestsSessionMixin, _Ie):
pass
class Edge(RequestsSessionMixin, _Edge):
pass
class Opera(RequestsSessionMixin, _Opera):
pass
class Safari(RequestsSessionMixin, _Safari):
pass
class BlackBerry(RequestsSessionMixin, _BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, _PhantomJS):
pass
class Android(RequestsSessionMixin, _Android):
pass
class Remote(RequestsSessionMixin, _Remote):
pass
|
<commit_before>from selenium.webdriver import Firefox, Chrome, Ie, Edge, Opera, Safari, BlackBerry, PhantomJS, Android, Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, Firefox):
pass
class Chrome(RequestsSessionMixin, Chrome):
pass
class Ie(RequestsSessionMixin, Ie):
pass
class Edge(RequestsSessionMixin, Edge):
pass
class Opera(RequestsSessionMixin, Opera):
pass
class Safari(RequestsSessionMixin, Safari):
pass
class BlackBerry(RequestsSessionMixin, BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, PhantomJS):
pass
class Android(RequestsSessionMixin, Android):
pass
class Remote(RequestsSessionMixin, Remote):
pass
<commit_msg>Fix PyCharm warnings like this: "Cannot find reference `request` in `PhantomJS | WebDriver`"<commit_after>from selenium.webdriver import _Firefox, _Chrome, _Ie, _Edge, _Opera, _Safari, _BlackBerry, _PhantomJS, _Android, \
_Remote
from seleniumrequests.request import RequestsSessionMixin
class Firefox(RequestsSessionMixin, _Firefox):
pass
class Chrome(RequestsSessionMixin, _Chrome):
pass
class Ie(RequestsSessionMixin, _Ie):
pass
class Edge(RequestsSessionMixin, _Edge):
pass
class Opera(RequestsSessionMixin, _Opera):
pass
class Safari(RequestsSessionMixin, _Safari):
pass
class BlackBerry(RequestsSessionMixin, _BlackBerry):
pass
class PhantomJS(RequestsSessionMixin, _PhantomJS):
pass
class Android(RequestsSessionMixin, _Android):
pass
class Remote(RequestsSessionMixin, _Remote):
pass
|
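Editor's aside: the fix in the record above imports each selenium driver under a leading-underscore alias so the new class no longer shadows the name it inherits from — which is what confused PyCharm's static analysis; at runtime the original code already worked, since base classes are evaluated before the class name is rebound. The underlying pattern is mixin-first multiple inheritance. A generic, runnable sketch with illustrative names (not selenium's API):

class RequestsMixin:
    def request(self, method, url):
        # extra behaviour layered onto the base driver
        return '%s %s' % (method, url)

class _Driver:  # stands in for e.g. selenium.webdriver.Firefox
    pass

class Driver(RequestsMixin, _Driver):  # public name re-exported unchanged
    pass

assert Driver().request('GET', 'http://example.com') == 'GET http://example.com'

Because the mixin comes first in the MRO, its methods win over anything the base defines, while the alias keeps the public class name identical to the original driver's.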
1e0327c852b851f867d21a182ba7604b42d15331
|
examples/charts/file/stacked_bar.py
|
examples/charts/file/stacked_bar.py
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
from bokeh.models.tools import HoverTool
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals")
bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')]))
output_file("stacked_bar.html")
show(bar)
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals",
hover=[('medal', '@medal'), ('country', '@abbr')])
output_file("stacked_bar.html")
show(bar)
|
Update stacked bar example to use the hover kwarg.
|
Update stacked bar example to use the hover kwarg.
|
Python
|
bsd-3-clause
|
Karel-van-de-Plassche/bokeh,rs2/bokeh,jakirkham/bokeh,msarahan/bokeh,DuCorey/bokeh,schoolie/bokeh,schoolie/bokeh,quasiben/bokeh,timsnyder/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,stonebig/bokeh,bokeh/bokeh,ericmjl/bokeh,bokeh/bokeh,aavanian/bokeh,dennisobrien/bokeh,clairetang6/bokeh,DuCorey/bokeh,ericmjl/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,justacec/bokeh,ptitjano/bokeh,rs2/bokeh,timsnyder/bokeh,draperjames/bokeh,msarahan/bokeh,ptitjano/bokeh,dennisobrien/bokeh,justacec/bokeh,aiguofer/bokeh,draperjames/bokeh,ptitjano/bokeh,jakirkham/bokeh,azjps/bokeh,mindriot101/bokeh,philippjfr/bokeh,mindriot101/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,philippjfr/bokeh,timsnyder/bokeh,ptitjano/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,jakirkham/bokeh,dennisobrien/bokeh,aiguofer/bokeh,schoolie/bokeh,phobson/bokeh,mindriot101/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,draperjames/bokeh,bokeh/bokeh,phobson/bokeh,aavanian/bokeh,philippjfr/bokeh,ericmjl/bokeh,clairetang6/bokeh,draperjames/bokeh,percyfal/bokeh,percyfal/bokeh,aavanian/bokeh,mindriot101/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,bokeh/bokeh,percyfal/bokeh,philippjfr/bokeh,ptitjano/bokeh,aavanian/bokeh,quasiben/bokeh,percyfal/bokeh,msarahan/bokeh,DuCorey/bokeh,aiguofer/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,phobson/bokeh,timsnyder/bokeh,timsnyder/bokeh,phobson/bokeh,rs2/bokeh,bokeh/bokeh,justacec/bokeh,schoolie/bokeh,stonebig/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,clairetang6/bokeh,azjps/bokeh,jakirkham/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,msarahan/bokeh,dennisobrien/bokeh,quasiben/bokeh,percyfal/bokeh,aiguofer/bokeh,jakirkham/bokeh,DuCorey/bokeh,DuCorey/bokeh,philippjfr/bokeh,clairetang6/bokeh,stonebig/bokeh,azjps/bokeh,dennisobrien/bokeh,aiguofer/bokeh
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
from bokeh.models.tools import HoverTool
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals")
bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')]))
output_file("stacked_bar.html")
show(bar)
Update stacked bar example to use the hover kwarg.
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals",
hover=[('medal', '@medal'), ('country', '@abbr')])
output_file("stacked_bar.html")
show(bar)
|
<commit_before>from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
from bokeh.models.tools import HoverTool
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals")
bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')]))
output_file("stacked_bar.html")
show(bar)
<commit_msg>Update stacked bar example to use the hover kwarg.<commit_after>
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals",
hover=[('medal', '@medal'), ('country', '@abbr')])
output_file("stacked_bar.html")
show(bar)
|
from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
from bokeh.models.tools import HoverTool
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals")
bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')]))
output_file("stacked_bar.html")
show(bar)
Update stacked bar example to use the hover kwarg.from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals",
hover=[('medal', '@medal'), ('country', '@abbr')])
output_file("stacked_bar.html")
show(bar)
|
<commit_before>from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
from bokeh.models.tools import HoverTool
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals")
bar.add_tools(HoverTool(tooltips=[('medal', '@medal'), ('country', '@abbr')]))
output_file("stacked_bar.html")
show(bar)
<commit_msg>Update stacked bar example to use the hover kwarg.<commit_after>from bokeh.charts import Bar, output_file, show
from bokeh.charts.operations import blend
from bokeh.charts.attributes import cat, color
from bokeh.charts.utils import df_from_json
from bokeh.sampledata.olympics2014 import data
# utilize utility to make it easy to get json/dict data converted to a dataframe
df = df_from_json(data)
# filter by countries with at least one medal and sort by total medals
df = df[df['total'] > 0]
df = df.sort("total", ascending=False)
bar = Bar(df,
values=blend('bronze', 'silver', 'gold', name='medals', labels_name='medal'),
label=cat(columns='abbr', sort=False),
stack=cat(columns='medal', sort=False),
color=color(columns='medal', palette=['SaddleBrown', 'Silver', 'Goldenrod'],
sort=False),
legend='top_right',
title="Medals per Country, Sorted by Total Medals",
hover=[('medal', '@medal'), ('country', '@abbr')])
output_file("stacked_bar.html")
show(bar)
|
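Editor's aside: the record above replaces bar.add_tools(HoverTool(tooltips=...)) with a hover= keyword on the bokeh.charts Bar constructor (bokeh.charts was later removed from Bokeh); the tooltip spec itself — a list of (label, '@column') pairs — is unchanged. For reference, a sketch of the same tooltips with the bokeh.plotting API that replaced bokeh.charts; it assumes a current Bokeh install, and the data and column names are illustrative:

from bokeh.models import ColumnDataSource, HoverTool
from bokeh.plotting import figure

source = ColumnDataSource(data=dict(abbr=['USA', 'RUS'], total=[28, 22]))
p = figure(x_range=source.data['abbr'])  # categorical x axis
p.vbar(x='abbr', top='total', width=0.9, source=source)
# '@abbr' and '@total' resolve against the ColumnDataSource columns
p.add_tools(HoverTool(tooltips=[('country', '@abbr'), ('total', '@total')]))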
05419e49c438c3f867c1ab4bd37021755ec09332
|
skimage/exposure/__init__.py
|
skimage/exposure/__init__.py
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log']
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
from .unwrap import unwrap
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log',
'unwrap']
|
Make unwrap visible in the exposure package.
|
Make unwrap visible in the exposure package.
|
Python
|
bsd-3-clause
|
SamHames/scikit-image,ClinicalGraphics/scikit-image,chintak/scikit-image,bennlich/scikit-image,robintw/scikit-image,rjeli/scikit-image,youprofit/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,blink1073/scikit-image,youprofit/scikit-image,GaZ3ll3/scikit-image,Britefury/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,michaelpacer/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,juliusbierk/scikit-image,ofgulban/scikit-image,paalge/scikit-image,almarklein/scikit-image,emon10005/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,SamHames/scikit-image,almarklein/scikit-image,michaelaye/scikit-image,newville/scikit-image,Hiyorimi/scikit-image,jwiggins/scikit-image,Midafi/scikit-image,robintw/scikit-image,bsipocz/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,paalge/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,SamHames/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,Midafi/scikit-image,paalge/scikit-image,bsipocz/scikit-image,oew1v07/scikit-image,michaelpacer/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,keflavich/scikit-image,newville/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,dpshelio/scikit-image,bennlich/scikit-image,WarrenWeckesser/scikits-image,keflavich/scikit-image,Britefury/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,GaZ3ll3/scikit-image,chriscrosscutler/scikit-image
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log']
Make unwrap visible in the exposure package.
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
from .unwrap import unwrap
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log',
'unwrap']
|
<commit_before>from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log']
<commit_msg>Make unwrap visible in the exposure package.<commit_after>
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
from .unwrap import unwrap
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log',
'unwrap']
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log']
Make unwrap visible in the exposure package.from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
from .unwrap import unwrap
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log',
'unwrap']
|
<commit_before>from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log']
<commit_msg>Make unwrap visible in the exposure package.<commit_after>from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution, \
adjust_gamma, adjust_sigmoid, adjust_log
from ._adapthist import equalize_adapthist
from .unwrap import unwrap
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution',
'adjust_gamma',
'adjust_sigmoid',
'adjust_log',
'unwrap']
|
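Editor's aside: the change in the record above is the standard package re-export: import the function in the package __init__ and add it to __all__, so skimage.exposure.unwrap becomes public API and survives `from skimage.exposure import *`. A generic sketch of the pattern (module and function names illustrative):

# mypkg/__init__.py
from ._impl import unwrap   # pull the function up from a private submodule

__all__ = ['unwrap']        # advertise it for star-imports and doc tooling

The import line alone already makes mypkg.unwrap resolvable; listing the name in __all__ additionally controls star-imports and signals the intended public surface.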
a4c2b68a69d89a293568fb257b4a8c0549a5ef9b
|
solitude/settings/sites/dev/db.py
|
solitude/settings/sites/dev/db.py
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
BANGO_PROXY = private.BANGO_PROXY
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
# Swap these around when bug 831576 is fixed.
# Speak to Bango directly.
BANGO_ENV = 'test'
BANGO_AUTH = private.BANGO_AUTH
# Use the proxy.
#BANGO_PROXY = private.BANGO_PROXY
|
Revert "turn proxy back on"
|
Revert "turn proxy back on"
This reverts commit c5b3a15a2815ff362104afaa6a996fab6f35ae1a.
|
Python
|
bsd-3-clause
|
muffinresearch/solitude,muffinresearch/solitude
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
BANGO_PROXY = private.BANGO_PROXY
Revert "turn proxy back on"
This reverts commit c5b3a15a2815ff362104afaa6a996fab6f35ae1a.
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
# Swap these around when bug 831576 is fixed.
# Speak to Bango directly.
BANGO_ENV = 'test'
BANGO_AUTH = private.BANGO_AUTH
# Use the proxy.
#BANGO_PROXY = private.BANGO_PROXY
|
<commit_before>"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
BANGO_PROXY = private.BANGO_PROXY
<commit_msg>Revert "turn proxy back on"
This reverts commit c5b3a15a2815ff362104afaa6a996fab6f35ae1a.<commit_after>
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
# Swap these around when bug 831576 is fixed.
# Speak to Bango directly.
BANGO_ENV = 'test'
BANGO_AUTH = private.BANGO_AUTH
# Use the proxy.
#BANGO_PROXY = private.BANGO_PROXY
|
"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
BANGO_PROXY = private.BANGO_PROXY
Revert "turn proxy back on"
This reverts commit c5b3a15a2815ff362104afaa6a996fab6f35ae1a."""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
# Swap these around when bug 831576 is fixed.
# Speak to Bango directly.
BANGO_ENV = 'test'
BANGO_AUTH = private.BANGO_AUTH
# Use the proxy.
#BANGO_PROXY = private.BANGO_PROXY
|
<commit_before>"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
BANGO_PROXY = private.BANGO_PROXY
<commit_msg>Revert "turn proxy back on"
This reverts commit c5b3a15a2815ff362104afaa6a996fab6f35ae1a.<commit_after>"""private_base will be populated from puppet and placed in this directory"""
import logging
import dj_database_url
import private_base as private
ADMINS = ()
DATABASES = {}
DATABASES['default'] = dj_database_url.parse(private.DATABASES_DEFAULT_URL)
DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
DATABASES['default']['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}
DEBUG = False
DEBUG_PROPAGATE_EXCEPTIONS = False
HMAC_KEYS = private.HMAC_KEYS
LOG_LEVEL = logging.DEBUG
SECRET_KEY = private.SECRET_KEY
SENTRY_DSN = private.SENTRY_DSN
STATSD_HOST = private.STATSD_HOST
STATSD_PORT = private.STATSD_PORT
STATSD_PREFIX = private.STATSD_PREFIX
SYSLOG_TAG = 'http_app_payments_dev'
TEMPLATE_DEBUG = DEBUG
# Solitude specific settings.
AES_KEYS = private.AES_KEYS
CLEANSED_SETTINGS_ACCESS = True
CLIENT_JWT_KEYS = private.CLIENT_JWT_KEYS
PAYPAL_PROXY = private.PAYPAL_PROXY
PAYPAL_URL_WHITELIST = ('https://marketplace-dev.allizom.org',)
# Swap these around when bug 831576 is fixed.
# Speak to Bango directly.
BANGO_ENV = 'test'
BANGO_AUTH = private.BANGO_AUTH
# Use the proxy.
#BANGO_PROXY = private.BANGO_PROXY
|
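Editor's aside: both versions of the settings file above build DATABASES['default'] with dj_database_url.parse and then patch ENGINE and OPTIONS in place; the revert itself only flips the Bango settings from the proxy back to direct BANGO_ENV/BANGO_AUTH access. The parse helper turns a 12-factor style URL into Django's dict form — a small sketch, where the URL is a made-up example, not a real credential:

import dj_database_url

cfg = dj_database_url.parse('mysql://user:secret@db.example.com:3306/solitude')
cfg['OPTIONS'] = {'init_command': 'SET storage_engine=InnoDB'}  # same override as above
# cfg now carries ENGINE/NAME/USER/PASSWORD/HOST/PORT keys ready for DATABASES['default']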
27e137ef5f3b6c4f6c8679edc6412b2c237b8fb4
|
plasmapy/physics/tests/test_parameters_cython.py
|
plasmapy/physics/tests/test_parameters_cython.py
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from ...utils.exceptions import RelativityWarning, RelativityError
from ...utils.exceptions import PhysicsError
from ...constants import c, m_p, m_e, e, mu0
from ..parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from plasmapy.utils.exceptions import RelativityWarning, RelativityError
from plasmapy.utils.exceptions import PhysicsError
from plasmapy.constants import c, m_p, m_e, e, mu0
from plasmapy.physics.parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
Update tests for cython parameters
|
Update tests for cython parameters
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from ...utils.exceptions import RelativityWarning, RelativityError
from ...utils.exceptions import PhysicsError
from ...constants import c, m_p, m_e, e, mu0
from ..parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStrUpdate tests for cython parameters
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from plasmapy.utils.exceptions import RelativityWarning, RelativityError
from plasmapy.utils.exceptions import PhysicsError
from plasmapy.constants import c, m_p, m_e, e, mu0
from plasmapy.physics.parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
<commit_before>"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from ...utils.exceptions import RelativityWarning, RelativityError
from ...utils.exceptions import PhysicsError
from ...constants import c, m_p, m_e, e, mu0
from ..parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr<commit_msg>Update tests for cython parameters<commit_after>
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from plasmapy.utils.exceptions import RelativityWarning, RelativityError
from plasmapy.utils.exceptions import PhysicsError
from plasmapy.constants import c, m_p, m_e, e, mu0
from plasmapy.physics.parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from ...utils.exceptions import RelativityWarning, RelativityError
from ...utils.exceptions import PhysicsError
from ...constants import c, m_p, m_e, e, mu0
from ..parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStrUpdate tests for cython parameters"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from plasmapy.utils.exceptions import RelativityWarning, RelativityError
from plasmapy.utils.exceptions import PhysicsError
from plasmapy.constants import c, m_p, m_e, e, mu0
from plasmapy.physics.parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
<commit_before>"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from ...utils.exceptions import RelativityWarning, RelativityError
from ...utils.exceptions import PhysicsError
from ...constants import c, m_p, m_e, e, mu0
from ..parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr<commit_msg>Update tests for cython parameters<commit_after>"""Tests for functions that calculate plasma parameters using cython."""
import numpy as np
import pytest
from astropy import units as u
from warnings import simplefilter
from plasmapy.utils.exceptions import RelativityWarning, RelativityError
from plasmapy.utils.exceptions import PhysicsError
from plasmapy.constants import c, m_p, m_e, e, mu0
from plasmapy.physics.parameters_cython import (thermal_speed,
)
def test_thermal_speed():
r"""Test for cythonized version of thermal_speed()."""
trueVal = 593083.619464999
T = 11604
methodVal = thermal_speed(T, particle="e", method="most_probable")
testTrue = np.isclose(methodVal,
trueVal,
rtol=0.0,
atol=1e-16)
exceptStr = f'Thermal speed value is {methodVal}, should be {trueVal}.'
assert testTrue, exceptStr
|
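Editor's aside: the diff in the record above switches the test module from relative (from ...utils) to absolute (from plasmapy.utils) imports, so the file resolves the same way however the test collector finds it. Worth noting about the tolerance the test relies on: np.isclose(a, b, rtol, atol) checks |a - b| <= atol + rtol * |b|, so rtol=0.0 with atol=1e-16 on a value near 6e5 demands agreement far below one double-precision ULP at that magnitude (about 1.2e-10) — effectively exact float equality. A runnable illustration:

import numpy as np

a = 593083.619464999
assert np.isclose(a, a, rtol=0.0, atol=1e-16)             # identical floats pass
assert not np.isclose(a, a + 1e-9, rtol=0.0, atol=1e-16)  # even a 1e-9 offset fails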
81bb47c28af70936be76f319ba780f2ad89ba2a0
|
Train_SDAE/tools/evaluate_model.py
|
Train_SDAE/tools/evaluate_model.py
|
import numpy as np
# import pandas as pd
# import sys
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(nHLayers, exp_data, labels, *args):
print len(args[0]), len(args[0][0]), len(args[0][1])
print len(args[0][2])
print "NewLine!\n", len(args[0][3])
print "NewLine!\n", len(args[0][4])
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
for i in range(nHLayers):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(args[0][i]).shape, np.asarray(args[0][nHLayers + i]).shape
act = get_activations(act.T, args[0][i], args[0][nHLayers + i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
import numpy as np
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(exp_data, labels, weights, biases, n_layers=None):
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
# Using ternary operator for shortness
n = n_layers if n_layers else len(weights)
for i in range(n):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(weights[i]).shape, np.asarray(biases[i]).shape
act = get_activations(act.T, weights[i], biases[i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
Support for variable number of layers
|
Support for variable number of layers
|
Python
|
apache-2.0
|
glrs/StackedDAE,glrs/StackedDAE
|
import numpy as np
# import pandas as pd
# import sys
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(nHLayers, exp_data, labels, *args):
print len(args[0]), len(args[0][0]), len(args[0][1])
print len(args[0][2])
print "NewLine!\n", len(args[0][3])
print "NewLine!\n", len(args[0][4])
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
for i in range(nHLayers):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(args[0][i]).shape, np.asarray(args[0][nHLayers + i]).shape
act = get_activations(act.T, args[0][i], args[0][nHLayers + i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
Support for variable number of layers
|
import numpy as np
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Weights and biases are passed as separate per-layer lists (no more *args)
def run_random_forest(exp_data, labels, weights, biases, n_layers=None):
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
# Using ternary operator for shortness
n = n_layers if n_layers else len(weights)
for i in range(n):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(weights[i]).shape, np.asarray(biases[i]).shape
act = get_activations(act.T, weights[i], biases[i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
<commit_before>import numpy as np
# import pandas as pd
# import sys
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(nHLayers, exp_data, labels, *args):
print len(args[0]), len(args[0][0]), len(args[0][1])
print len(args[0][2])
print "NewLine!\n", len(args[0][3])
print "NewLine!\n", len(args[0][4])
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
for i in range(nHLayers):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(args[0][i]).shape, np.asarray(args[0][nHLayers + i]).shape
act = get_activations(act.T, args[0][i], args[0][nHLayers + i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
<commit_msg>Support for variable number of layers<commit_after>
|
import numpy as np
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Weights and biases are passed as separate per-layer lists (no more *args)
def run_random_forest(exp_data, labels, weights, biases, n_layers=None):
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
# Using ternary operator for shortness
n = n_layers if n_layers else len(weights)
for i in range(n):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(weights[i]).shape, np.asarray(biases[i]).shape
act = get_activations(act.T, weights[i], biases[i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
import numpy as np
# import pandas as pd
# import sys
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(nHLayers, exp_data, labels, *args):
print len(args[0]), len(args[0][0]), len(args[0][1])
print len(args[0][2])
print "NewLine!\n", len(args[0][3])
print "NewLine!\n", len(args[0][4])
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
for i in range(nHLayers):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(args[0][i]).shape, np.asarray(args[0][nHLayers + i]).shape
act = get_activations(act.T, args[0][i], args[0][nHLayers + i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
Support for variable number of layersimport numpy as np
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Weights and biases are passed as separate per-layer lists (no more *args)
def run_random_forest(exp_data, labels, weights, biases, n_layers=None):
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
# Using ternary operator for shortness
n = n_layers if n_layers else len(weights)
for i in range(n):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(weights[i]).shape, np.asarray(biases[i]).shape
act = get_activations(act.T, weights[i], biases[i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
<commit_before>import numpy as np
# import pandas as pd
# import sys
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Order of *args: first all the weights and then all the biases
def run_random_forest(nHLayers, exp_data, labels, *args):
print len(args[0]), len(args[0][0]), len(args[0][1])
print len(args[0][2])
print "NewLine!\n", len(args[0][3])
print "NewLine!\n", len(args[0][4])
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
for i in range(nHLayers):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(args[0][i]).shape, np.asarray(args[0][nHLayers + i]).shape
act = get_activations(act.T, args[0][i], args[0][nHLayers + i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
<commit_msg>Support for variable number of layers<commit_after>import numpy as np
from scipy.special import expit
from sklearn import ensemble
def get_activations(exp_data, w, b):
exp_data = np.transpose(exp_data)
prod = exp_data.dot(w)
prod_with_bias = prod + b
return( expit(prod_with_bias) )
# Weights and biases are passed as separate per-layer lists (no more *args)
def run_random_forest(exp_data, labels, weights, biases, n_layers=None):
assert len(exp_data) == len(labels)
# I think they should be already transposed when running the code. Will see
act = exp_data#.T
# Using ternary operator for shortness
n = n_layers if n_layers else len(weights)
for i in range(n):
print('Weights and biases for layer: ' + str(i+1))
print np.asarray(weights[i]).shape, np.asarray(biases[i]).shape
act = get_activations(act.T, weights[i], biases[i])
rf = ensemble.RandomForestClassifier(n_estimators=1000, oob_score=True, max_depth=5)
rfit = rf.fit(act, labels)
print('OOB score: %.2f\n' % rfit.oob_score_)
|
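A hypothetical call site for the refactored run_random_forest signature (all shapes and data invented for illustration): weights and biases now travel as separate per-layer lists rather than a single *args tuple, and n_layers can be omitted.
import numpy as np

rng = np.random.default_rng(0)
exp_data = rng.random((100, 50))              # 100 samples, 50 features
labels = rng.integers(0, 2, size=100)         # binary labels
weights = [rng.random((50, 20)), rng.random((20, 10))]   # one matrix per layer
biases = [rng.random(20), rng.random(10)]                # one vector per layer

# n_layers defaults to len(weights) when left as None:
# run_random_forest(exp_data, labels, weights, biases)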
4986f02edbe45d73f8509b01270490cd8c8f90dd
|
docs/source/examples/chapel.sfile-inline.py
|
docs/source/examples/chapel.sfile-inline.py
|
from pych.extern import Chapel
@Chapel(sfile="/home/safl/pychapel/module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
from pych.extern import Chapel
import os
currentloc = os.getcwd();
# Note: depends on test living in a specific location relative to
# mymodule.chpl. Not ideal, but also not a huge issue.
@Chapel(sfile=currentloc + "/../../../module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
Use repository hierarchy instead of absolute path for sfile
|
Use repository hierarchy instead of absolute path for sfile
The test chapel.sfile-inline.py was depending on an absolute path to find the
location of a chapel file that was outside the normal sfile storage location.
The absolute location was both machine and user-specific. I replaced the path
with a relative path that depends on the directory structure for the repository.
This makes it possible to run the test successfully in more environments. It's a little
finicky, but I intend to make the test more robust later.
|
Python
|
apache-2.0
|
chapel-lang/pychapel,chapel-lang/pychapel,russel/pychapel,safl/pychapel,chapel-lang/pychapel,safl/pychapel,safl/pychapel,russel/pychapel,russel/pychapel,safl/pychapel
|
from pych.extern import Chapel
@Chapel(sfile="/home/safl/pychapel/module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
Use repository hierarchy instead of absolute path for sfile
The test chapel.sfile-inline.py was depending on an absolute path to find the
location of a chapel file that was outside the normal sfile storage location.
The absolute location was both machine and user-specific. I replaced the path
with a relative path that depends on the directory structure for the repository.
This makes it possible to run the test successfully in more environments. It's a little
finicky, but I intend to make the test more robust later.
|
from pych.extern import Chapel
import os
currentloc = os.getcwd();
# Note: depends on test living in a specific location relative to
# mymodule.chpl. Not ideal, but also not a huge issue.
@Chapel(sfile=currentloc + "/../../../module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
<commit_before>from pych.extern import Chapel
@Chapel(sfile="/home/safl/pychapel/module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
<commit_msg>Use repository hierarchy instead of absolute path for sfile
The test chapel.sfile-inline.py was depending on an absolute path to find the
location of a chapel file that was outside the normal sfile storage location.
The absolute location was both machine and user-specific. I replaced the path
with a relative path that depends on the directory structure for the repository.
This makes it possible to run the test successfully in more environments. It's a little
finicky, but I intend to make the test more robust later.<commit_after>
|
from pych.extern import Chapel
import os
currentloc = os.getcwd();
# Note: depends on test living in a specific location relative to
# mymodule.chpl. Not ideal, but also not a huge issue.
@Chapel(sfile=currentloc + "/../../../module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
from pych.extern import Chapel
@Chapel(sfile="/home/safl/pychapel/module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
Use repository hierarchy instead of absolute path for sfile
The test chapel.sfile-inline.py was depending on an absolute path to find the
location of a chapel file that was outside the normal sfile storage location.
The absolute location was both machine and user-specific. I replaced the path
with a relative path that depends on the directory structure for the repository.
This makes it possible to run the test successfully in more environments. It's a little
finicky, but I intend to make the test more robust later.from pych.extern import Chapel
import os
currentloc = os.getcwd();
# Note: depends on test living in a specific location relative to
# mymodule.chpl. Not ideal, but also not a huge issue.
@Chapel(sfile=currentloc + "/../../../module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
<commit_before>from pych.extern import Chapel
@Chapel(sfile="/home/safl/pychapel/module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
<commit_msg>Use repository hierarchy instead of absolute path for sfile
The test chapel.sfile-inline.py was depending on an absolute path to find the
location of a chapel file that was outside the normal sfile storage location.
The absolute location was both machine and user-specific. I replaced the path
with a relative path that depends on the directory structure for the repository.
This makes it possible to run the test successfully in more environments. It's a little
finicky, but I intend to make the test more robust later.<commit_after>from pych.extern import Chapel
import os
currentloc = os.getcwd();
# Note: depends on test living in a specific location relative to
# mymodule.chpl. Not ideal, but also not a huge issue.
@Chapel(sfile=currentloc + "/../../../module/ext/src/mymodule.chpl")
def hello_mymodule():
return None
@Chapel()
def hello_inline():
"""
writeln("Hello from inline.");
"""
return None
if __name__ == "__main__":
hello_mymodule()
hello_inline()
|
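A further hardening step, sketched here under the assumption that the repository layout stays fixed: resolving the path from the test file itself (via __file__) instead of os.getcwd() removes the remaining dependence on where the interpreter was launched.
import os

here = os.path.dirname(os.path.abspath(__file__))
sfile_path = os.path.normpath(
    os.path.join(here, "..", "..", "..", "module", "ext", "src", "mymodule.chpl")
)
# @Chapel(sfile=sfile_path) would then work from any working directory.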
6b23446292ce8e35f1e4fd5fa0bb73ca5596eddb
|
plotly/tests/test_core/test_plotly/test_credentials.py
|
plotly/tests/test_core/test_plotly/test_credentials.py
|
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si
|
from unittest import TestCase
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si
class TestSignIn(TestCase):
def test_get_config(self):
plotly_domain = 'test domain'
plotly_streaming_domain = 'test streaming domain'
config1 = py.get_config()
py._config['plotly_domain'] = plotly_domain
config2 = py.get_config()
py._config['plotly_streaming_domain'] = plotly_streaming_domain
config3 = py.get_config()
self.assertEqual(config2['plotly_domain'], plotly_domain)
self.assertNotEqual(
config2['plotly_streaming_domain'], plotly_streaming_domain
)
self.assertEqual(
config3['plotly_streaming_domain'], plotly_streaming_domain
)
|
Add a test for get_config()
|
Add a test for get_config()
|
Python
|
mit
|
ee-in/python-api,plotly/python-api,plotly/plotly.py,ee-in/python-api,plotly/plotly.py,ee-in/python-api,plotly/python-api,plotly/python-api,plotly/plotly.py
|
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == siAdd a test for get_config()
|
from unittest import TestCase
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si
class TestSignIn(TestCase):
def test_get_config(self):
plotly_domain = 'test domain'
plotly_streaming_domain = 'test streaming domain'
config1 = py.get_config()
py._config['plotly_domain'] = plotly_domain
config2 = py.get_config()
py._config['plotly_streaming_domain'] = plotly_streaming_domain
config3 = py.get_config()
self.assertEqual(config2['plotly_domain'], plotly_domain)
self.assertNotEqual(
config2['plotly_streaming_domain'], plotly_streaming_domain
)
self.assertEqual(
config3['plotly_streaming_domain'], plotly_streaming_domain
)
|
<commit_before>import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si<commit_msg>Add a test for get_config()<commit_after>
|
from unittest import TestCase
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si
class TestSignIn(TestCase):
def test_get_config(self):
plotly_domain = 'test domain'
plotly_streaming_domain = 'test streaming domain'
config1 = py.get_config()
py._config['plotly_domain'] = plotly_domain
config2 = py.get_config()
py._config['plotly_streaming_domain'] = plotly_streaming_domain
config3 = py.get_config()
self.assertEqual(config2['plotly_domain'], plotly_domain)
self.assertNotEqual(
config2['plotly_streaming_domain'], plotly_streaming_domain
)
self.assertEqual(
config3['plotly_streaming_domain'], plotly_streaming_domain
)
|
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == siAdd a test for get_config()from unittest import TestCase
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si
class TestSignIn(TestCase):
def test_get_config(self):
plotly_domain = 'test domain'
plotly_streaming_domain = 'test streaming domain'
config1 = py.get_config()
py._config['plotly_domain'] = plotly_domain
config2 = py.get_config()
py._config['plotly_streaming_domain'] = plotly_streaming_domain
config3 = py.get_config()
self.assertEqual(config2['plotly_domain'], plotly_domain)
self.assertNotEqual(
config2['plotly_streaming_domain'], plotly_streaming_domain
)
self.assertEqual(
config3['plotly_streaming_domain'], plotly_streaming_domain
)
|
<commit_before>import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si<commit_msg>Add a test for get_config()<commit_after>from unittest import TestCase
import plotly.plotly.plotly as py
import plotly.tools as tls
def test_get_credentials():
if 'username' in py._credentials:
del py._credentials['username']
if 'api_key' in py._credentials:
del py._credentials['api_key']
creds = py.get_credentials()
file_creds = tls.get_credentials_file()
print(creds)
print(file_creds)
assert creds == file_creds
def test_sign_in():
un = 'anyone'
ak = 'something'
# TODO, add this!
# si = ['this', 'and-this']
py.sign_in(un, ak)
creds = py.get_credentials()
assert creds['username'] == un
assert creds['api_key'] == ak
# TODO, and check it!
# assert creds['stream_ids'] == si
class TestSignIn(TestCase):
def test_get_config(self):
plotly_domain = 'test domain'
plotly_streaming_domain = 'test streaming domain'
config1 = py.get_config()
py._config['plotly_domain'] = plotly_domain
config2 = py.get_config()
py._config['plotly_streaming_domain'] = plotly_streaming_domain
config3 = py.get_config()
self.assertEqual(config2['plotly_domain'], plotly_domain)
self.assertNotEqual(
config2['plotly_streaming_domain'], plotly_streaming_domain
)
self.assertEqual(
config3['plotly_streaming_domain'], plotly_streaming_domain
)
|
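One caveat with the test above, shown as a hedged sketch rather than a claim about the plotly API: test_get_config mutates the module-level py._config dict and never restores it, so later tests in the same session see the 'test domain' values. Snapshotting the dict in setUp and restoring it in tearDown keeps the mutation contained.
from unittest import TestCase

import plotly.plotly.plotly as py


class IsolatedConfigTest(TestCase):
    def setUp(self):
        self._saved_config = dict(py._config)   # shallow snapshot of shared state

    def tearDown(self):
        py._config.clear()
        py._config.update(self._saved_config)   # undo whatever the test changed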
90974a088813dcc3a0c4a7cae5758f67c4b52a15
|
qual/tests/test_calendar.py
|
qual/tests/test_calendar.py
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def test_valid_date(self):
d = self.calendar.date(1200, 2, 29)
self.assertIsNotNone(d)
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
Check a leap year date from before the start of the calendar.
|
Check a leap year date from before the start of the calendar.
This is not really a strong test of the proleptic calendar. All days back to year 1 which are valid in the Julian calendar are valid in the Gregorian calendar.
|
Python
|
apache-2.0
|
jwg4/qual,jwg4/calexicon
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def test_valid_date(self):
d = self.calendar.date(1200, 2, 29)
self.assertIsNotNone(d)
Check a leap year date from before the start of the calendar.
This is not really a strong test of the proleptic calendar. All days back to year 1 which are valid in the Julian calendar are valid in the Gregorian calendar.
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
<commit_before>import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def test_valid_date(self):
d = self.calendar.date(1200, 2, 29)
self.assertIsNotNone(d)
<commit_msg>Check a leap year date from before the start of the calendar.
This is not really a strong test of the proleptic calendar. All days back to year 1 which are valid in the Julian calendar are valid in the Gregorian calendar.<commit_after>
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def test_valid_date(self):
d = self.calendar.date(1200, 2, 29)
self.assertIsNotNone(d)
Check a leap year date from before the start of the calendar.
This is not really a strong test of the proleptic calendar. All days back to year 1 which are valid in the Julian calendar are valid in the Gregorian calendar.import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
<commit_before>import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def test_valid_date(self):
d = self.calendar.date(1200, 2, 29)
self.assertIsNotNone(d)
<commit_msg>Check a leap year date from before the start of the calendar.
This is not really a strong test of the proleptic calendar. All days back to year 1 which are valid in the Julian calendar are valid in the Gregorian calendar.<commit_after>import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
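For reference, the leap-year rule the test leans on, written out as a standalone sketch (not part of the qual package): a proleptic Gregorian year is a leap year when it is divisible by 4, unless it is a century year not divisible by 400.
def is_gregorian_leap_year(year):
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

assert is_gregorian_leap_year(1200)       # 1200 % 400 == 0, so Feb 29 1200 exists
assert not is_gregorian_leap_year(1900)   # century year, not divisible by 400
assert is_gregorian_leap_year(2000)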
dc70fb35a104e260b40425fce23cba84b9770994
|
addons/event/models/res_partner.py
|
addons/event/models/res_partner.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
self.event_count = 0
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
Set default value for event_count
|
[FIX] event: Set default value for event_count
Fixes https://github.com/odoo/odoo/pull/39583
This commit adds a default value for event_count
Assigning default value for non-stored compute fields is required in 13.0
closes odoo/odoo#39974
X-original-commit: 9ca72b98f54d7686c0e6019870b40f14dbdd2881
Signed-off-by: Victor Feyens (vfe) <433cda6c0f0b5b2dac2ef769109a6da90db60157@odoo.com>
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
[FIX] event: Set default value for event_count
Fixes https://github.com/odoo/odoo/pull/39583
This commit adds a default value for event_count
Assigning default value for non-stored compute fields is required in 13.0
closes odoo/odoo#39974
X-original-commit: 9ca72b98f54d7686c0e6019870b40f14dbdd2881
Signed-off-by: Victor Feyens (vfe) <433cda6c0f0b5b2dac2ef769109a6da90db60157@odoo.com>
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
self.event_count = 0
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
<commit_before># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
<commit_msg>[FIX] event: Set default value for event_count
Fixes https://github.com/odoo/odoo/pull/39583
This commit adds a default value for event_count
Assigning default value for non-stored compute fields is required in 13.0
closes odoo/odoo#39974
X-original-commit: 9ca72b98f54d7686c0e6019870b40f14dbdd2881
Signed-off-by: Victor Feyens (vfe) <433cda6c0f0b5b2dac2ef769109a6da90db60157@odoo.com><commit_after>
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
self.event_count = 0
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
[FIX] event: Set default value for event_count
Fixes https://github.com/odoo/odoo/pull/39583
This commit adds a default value for event_count
Assigning default value for non-stored compute fields is required in 13.0
closes odoo/odoo#39974
X-original-commit: 9ca72b98f54d7686c0e6019870b40f14dbdd2881
Signed-off-by: Victor Feyens (vfe) <433cda6c0f0b5b2dac2ef769109a6da90db60157@odoo.com># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
self.event_count = 0
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
<commit_before># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
<commit_msg>[FIX] event: Set default value for event_count
Fixes https://github.com/odoo/odoo/pull/39583
This commit adds a default value for event_count
Assigning default value for non-stored compute fields is required in 13.0
closes odoo/odoo#39974
X-original-commit: 9ca72b98f54d7686c0e6019870b40f14dbdd2881
Signed-off-by: Victor Feyens (vfe) <433cda6c0f0b5b2dac2ef769109a6da90db60157@odoo.com><commit_after># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
event_count = fields.Integer("Events", compute='_compute_event_count', help="Number of events the partner has participated.")
def _compute_event_count(self):
self.event_count = 0
if not self.user_has_groups('event.group_event_user'):
return
for partner in self:
partner.event_count = self.env['event.event'].search_count([('registration_ids.partner_id', 'child_of', partner.ids)])
def action_event_view(self):
action = self.env.ref('event.action_event_view').read()[0]
action['context'] = {}
action['domain'] = [('registration_ids.partner_id', 'child_of', self.ids)]
return action
|
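The shape of the fix, restated as a framework-free sketch (all names invented, not Odoo API): assign a default to every record before the early return, so a guard that bails out no longer leaves some records without a computed value.
def compute_counts(partners, user_can_see_events):
    for p in partners:
        p['event_count'] = 0            # default assigned unconditionally first
    if not user_can_see_events:
        return                          # early exit no longer leaves gaps
    for p in partners:
        p['event_count'] = len(p.get('registrations', []))

partners = [{'registrations': [1, 2]}, {}]
compute_counts(partners, user_can_see_events=False)
assert all(p['event_count'] == 0 for p in partners)   # every record got a value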
4f776ca2260419c06c2594568c73ce279426d039
|
GenotypeNetwork/test_genotype_network.py
|
GenotypeNetwork/test_genotype_network.py
|
import GenotypeNetwork as gn
import os
import networkx as nx
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
import GenotypeNetwork as gn
import os
import networkx as nx
# Change cwd for tests to the current path.
here = os.path.dirname(os.path.realpath(__file__))
os.chdir(here)
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
Make tests run from correct directory.
|
Make tests run from correct directory.
|
Python
|
mit
|
ericmjl/genotype-network
|
import GenotypeNetwork as gn
import os
import networkx as nx
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
Make tests run from correct directory.
|
import GenotypeNetwork as gn
import os
import networkx as nx
# Change cwd for tests to the current path.
here = os.path.dirname(os.path.realpath(__file__))
os.chdir(here)
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
<commit_before>import GenotypeNetwork as gn
import os
import networkx as nx
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
<commit_msg>Make tests run from correct directory.<commit_after>
|
import GenotypeNetwork as gn
import os
import networkx as nx
# Change cwd for tests to the current path.
here = os.path.dirname(os.path.realpath(__file__))
os.chdir(here)
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
import GenotypeNetwork as gn
import os
import networkx as nx
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
Make tests run from correct directory.import GenotypeNetwork as gn
import os
import networkx as nx
# Change cwd for tests to the current path.
here = os.path.dirname(os.path.realpath(__file__))
os.chdir(here)
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
<commit_before>import GenotypeNetwork as gn
import os
import networkx as nx
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
<commit_msg>Make tests run from correct directory.<commit_after>import GenotypeNetwork as gn
import os
import networkx as nx
# Change cwd for tests to the current path.
here = os.path.dirname(os.path.realpath(__file__))
os.chdir(here)
GN = gn.GenotypeNetwork()
GN.read_sequences('test/Demo_052715.fasta')
GN.generate_genotype_network()
GN.write_genotype_network('test/Demo_052715.pkl')
GN.read_genotype_network('test/Demo_052715.pkl')
def test_read_sequences_works_correctly():
"""
Checks that GN.read_sequences reads in correct number of sequences.
"""
assert len(GN.sequences) == 3
def test_generate_genotype_network():
"""
Checks that the number of nodes equals the number of sequences
Checks number of edges
"""
assert len(GN.sequences) == len(GN.G.nodes())
assert len(GN.G.edges()) == 2 # This will change based on dataset
def test_write_genotype_network():
"""
Checks that the pickled network is written to disk.
"""
assert 'Demo_052715.pkl' in os.listdir('Test')
def test_read_genotype_network():
"""
Checks that the genotype network is being loaded correctly by counting
nodes in a test pkl file.
"""
G = nx.read_gpickle('Test/Demo_052715.pkl')
# The length of the test file
assert len(G.nodes()) == 3
|
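An alternative to the chdir-at-import approach, sketched under the assumption that only file lookups need anchoring: building paths from __file__ leaves the process-wide working directory alone, which matters when other tests in the same session depend on it.
import os

HERE = os.path.dirname(os.path.realpath(__file__))

def data_path(name):
    """Resolve a file in the test data directory relative to this module."""
    return os.path.join(HERE, 'test', name)

# e.g. GN.read_sequences(data_path('Demo_052715.fasta'))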
43f58f5378dda9c90f4d891d22d6f44debb3700e
|
service/es_access.py
|
service/es_access.py
|
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'street_name': {'missing': '_last'}},
{'house_no': {'missing': '_last'}},
{'house_alpha': {'missing': '_last'}},
{'street_name_2': {'missing': '_last'}},
{'secondary_house_no': {'missing': '_last'}},
{'secondary_house_alpha': {'missing': '_last'}},
{'sub_building_no': {'missing': '_last'}},
{'sub_building_description': {'missing': '_last'}},
{'first_number_in_address_string': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
|
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'house_number_or_first_number': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
|
Order only by house/initial number then address string
|
Order only by house/initial number then address string
|
Python
|
mit
|
LandRegistry/digital-register-api,LandRegistry/digital-register-api
|
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'street_name': {'missing': '_last'}},
{'house_no': {'missing': '_last'}},
{'house_alpha': {'missing': '_last'}},
{'street_name_2': {'missing': '_last'}},
{'secondary_house_no': {'missing': '_last'}},
{'secondary_house_alpha': {'missing': '_last'}},
{'sub_building_no': {'missing': '_last'}},
{'sub_building_description': {'missing': '_last'}},
{'first_number_in_address_string': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
Order only by house/initial number then address string
|
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'house_number_or_first_number': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
|
<commit_before>from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'street_name': {'missing': '_last'}},
{'house_no': {'missing': '_last'}},
{'house_alpha': {'missing': '_last'}},
{'street_name_2': {'missing': '_last'}},
{'secondary_house_no': {'missing': '_last'}},
{'secondary_house_alpha': {'missing': '_last'}},
{'sub_building_no': {'missing': '_last'}},
{'sub_building_description': {'missing': '_last'}},
{'first_number_in_address_string': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
<commit_msg>Order only by house/initial number then address string<commit_after>
|
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'house_number_or_first_number': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
|
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'street_name': {'missing': '_last'}},
{'house_no': {'missing': '_last'}},
{'house_alpha': {'missing': '_last'}},
{'street_name_2': {'missing': '_last'}},
{'secondary_house_no': {'missing': '_last'}},
{'secondary_house_alpha': {'missing': '_last'}},
{'sub_building_no': {'missing': '_last'}},
{'sub_building_description': {'missing': '_last'}},
{'first_number_in_address_string': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
Order only by house/initial number then address stringfrom elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'house_number_or_first_number': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
|
<commit_before>from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'street_name': {'missing': '_last'}},
{'house_no': {'missing': '_last'}},
{'house_alpha': {'missing': '_last'}},
{'street_name_2': {'missing': '_last'}},
{'secondary_house_no': {'missing': '_last'}},
{'secondary_house_alpha': {'missing': '_last'}},
{'sub_building_no': {'missing': '_last'}},
{'sub_building_description': {'missing': '_last'}},
{'first_number_in_address_string': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
<commit_msg>Order only by house/initial number then address string<commit_after>from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search
from service import app
ELASTICSEARCH_ENDPOINT = app.config['ELASTIC_SEARCH_ENDPOINT']
MAX_NUMBER_SEARCH_RESULTS = app.config['MAX_NUMBER_SEARCH_RESULTS']
# TODO: write integration tests for this module
def get_properties_for_postcode(postcode):
search = create_search('property_by_postcode_3')
query = search.filter('term', postcode=postcode).sort(
{'house_number_or_first_number': {'missing': '_last'}},
{'address_string': {'missing': '_last'}}
)
return query.execute().hits
def get_properties_for_address(address):
search = create_search('property_by_address')
query = search.query('match', address_string=address)
return query.execute().hits
def create_search(doc_type):
client = Elasticsearch([ELASTICSEARCH_ENDPOINT])
search = Search(using=client, index='landregistry', doc_type=doc_type)
search = search[0:MAX_NUMBER_SEARCH_RESULTS]
return search
def get_info():
return Elasticsearch([ELASTICSEARCH_ENDPOINT]).info()
|
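The commit above trims a ten-key sort down to two keys. As a standalone illustration of the elasticsearch-dsl pattern it relies on, here is a minimal sketch; the endpoint, index name, postcode, and result cap are illustrative stand-ins, not values from the commit:

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search

# Hypothetical endpoint and index; substitute your own cluster details.
client = Elasticsearch(['http://localhost:9200'])

search = (
    Search(using=client, index='addresses')
    .filter('term', postcode='AB1 2CD')
    # Each dict maps a field to sort options; 'missing': '_last' pushes
    # documents that lack the field after all documents that have it.
    .sort(
        {'house_number_or_first_number': {'missing': '_last'}},
        {'address_string': {'missing': '_last'}},
    )[0:20]  # cap the result size, as MAX_NUMBER_SEARCH_RESULTS does above
)
hits = search.execute().hits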
453af98b1a05c62acd55afca431236d8f54fdae3
|
test_bert_trainer.py
|
test_bert_trainer.py
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
        self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
Test for deterministic results when testing BERT model
|
Test for deterministic results when testing BERT model
|
Python
|
apache-2.0
|
googleinterns/smart-news-query-embeddings,googleinterns/smart-news-query-embeddings
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
if __name__ == '__main__':
unittest.main()
Test for deterministic results when testing BERT model
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
        self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
if __name__ == '__main__':
unittest.main()
<commit_msg>Test for deterministic results when testing BERT model<commit_after>
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
        self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
if __name__ == '__main__':
unittest.main()
Test for deterministic results when testing BERT modelimport unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
        self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
if __name__ == '__main__':
unittest.main()
<commit_msg>Test for deterministic results when testing BERT model<commit_after>import unittest
import time
import pandas as pd
from bert_trainer import BERTTrainer
from utils import *
class TestBERT(unittest.TestCase):
def test_init(self):
trainer = BERTTrainer()
def test_train(self):
output_dir = 'test_{}'.format(str(int(time.time())))
trainer = BERTTrainer(output_dir=output_dir)
print(trainer.bert_model_hub)
data = pd.DataFrame({
'abstract': ['test one', 'test two', 'test three'] * 5,
'section': ['U.S.', 'Arts', 'U.S.'] * 5,
})
data_column = 'abstract'
label_column = 'section'
train_features, test_features, _, label_list = train_and_test_features_from_df(data, data_column, label_column, trainer.bert_model_hub, trainer.max_seq_length)
trainer.train(train_features, label_list)
results = trainer.test(test_features)
print('Evaluation results:', results)
results2 = trainer.test(test_features)
print('Evaluation results:', results2)
eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy']
        self.assertEqual(eval_acc1, eval_acc2)
if __name__ == '__main__':
unittest.main()
|
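The added check above asserts that two evaluation passes over the same features agree. The pattern generalizes to any model whose test() should be side-effect free; a minimal self-contained sketch with a dummy model standing in for BERTTrainer (the model and features here are hypothetical):

import unittest

class DummyModel:
    """Stand-in for a trained model with a deterministic test() method."""
    def test(self, features):
        # A real model would run inference; here the result depends
        # only on the input, so repeated calls must agree.
        return {'eval_accuracy': sum(features) / len(features)}

class TestDeterminism(unittest.TestCase):
    def test_repeated_evaluation_matches(self):
        model = DummyModel()
        features = [1.0, 0.0, 1.0, 1.0]
        first = model.test(features)
        second = model.test(features)
        # assertEqual must be called on the TestCase instance (self).
        self.assertEqual(first['eval_accuracy'], second['eval_accuracy'])

if __name__ == '__main__':
    unittest.main()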
1d3e956dcf667601feb871eab2a462fa09d0d101
|
tests/test_length.py
|
tests/test_length.py
|
from math import sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
|
from math import fabs, sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import floats, isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
@given(v=vectors())
def test_length_zero(v: Vector):
"""1st axiom of normed vector spaces: |v| = 0 iff v = 0"""
assert (v.length == 0) == (v == (0, 0))
@given(v=vectors(), scalar=floats())
def test_length_scalar(v: Vector, scalar: float):
"""2nd axiom of normed vector spaces: |λv| = |λ| |v|"""
assert isclose((scalar * v).length, fabs(scalar) * v.length)
@given(v=vectors(), w=vectors())
def test_length_triangle(v: Vector, w: Vector):
"""3rd axiom of normed vector spaces: |v+w| = |v| + |w|"""
assert (v + w).length <= v.length + w.length
|
Test the axioms of normed vector spaces
|
tests/length: Test the axioms of normed vector spaces
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from math import sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
tests/length: Test the axioms of normed vector spaces
|
from math import fabs, sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import floats, isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
@given(v=vectors())
def test_length_zero(v: Vector):
"""1st axiom of normed vector spaces: |v| = 0 iff v = 0"""
assert (v.length == 0) == (v == (0, 0))
@given(v=vectors(), scalar=floats())
def test_length_scalar(v: Vector, scalar: float):
"""2nd axiom of normed vector spaces: |λv| = |λ| |v|"""
assert isclose((scalar * v).length, fabs(scalar) * v.length)
@given(v=vectors(), w=vectors())
def test_length_triangle(v: Vector, w: Vector):
"""3rd axiom of normed vector spaces: |v+w| = |v| + |w|"""
assert (v + w).length <= v.length + w.length
|
<commit_before>from math import sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
<commit_msg>tests/length: Test the axioms of normed vector spaces<commit_after>
|
from math import fabs, sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import floats, isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
@given(v=vectors())
def test_length_zero(v: Vector):
"""1st axiom of normed vector spaces: |v| = 0 iff v = 0"""
assert (v.length == 0) == (v == (0, 0))
@given(v=vectors(), scalar=floats())
def test_length_scalar(v: Vector, scalar: float):
"""2nd axiom of normed vector spaces: |λv| = |λ| |v|"""
assert isclose((scalar * v).length, fabs(scalar) * v.length)
@given(v=vectors(), w=vectors())
def test_length_triangle(v: Vector, w: Vector):
"""3rd axiom of normed vector spaces: |v+w| = |v| + |w|"""
assert (v + w).length <= v.length + w.length
|
from math import sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
tests/length: Test the axioms of normed vector spacesfrom math import fabs, sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import floats, isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
@given(v=vectors())
def test_length_zero(v: Vector):
"""1st axiom of normed vector spaces: |v| = 0 iff v = 0"""
assert (v.length == 0) == (v == (0, 0))
@given(v=vectors(), scalar=floats())
def test_length_scalar(v: Vector, scalar: float):
"""2nd axiom of normed vector spaces: |λv| = |λ| |v|"""
assert isclose((scalar * v).length, fabs(scalar) * v.length)
@given(v=vectors(), w=vectors())
def test_length_triangle(v: Vector, w: Vector):
"""3rd axiom of normed vector spaces: |v+w| = |v| + |w|"""
assert (v + w).length <= v.length + w.length
|
<commit_before>from math import sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
<commit_msg>tests/length: Test the axioms of normed vector spaces<commit_after>from math import fabs, sqrt
import pytest # type: ignore
from hypothesis import given
from ppb_vector import Vector
from utils import floats, isclose, vectors
@pytest.mark.parametrize(
"x, y, expected",
[(6, 8, 10),
(8, 6, 10),
(0, 0, 0),
(-6, -8, 10),
(1, 2, 2.23606797749979)],
)
def test_length(x, y, expected):
vector = Vector(x, y)
assert vector.length == expected
@given(v=vectors())
def test_length_dot(v: Vector):
"""Test that |v| ≃ √v²."""
assert isclose(v.length, sqrt(v * v))
@given(v=vectors())
def test_length_zero(v: Vector):
"""1st axiom of normed vector spaces: |v| = 0 iff v = 0"""
assert (v.length == 0) == (v == (0, 0))
@given(v=vectors(), scalar=floats())
def test_length_scalar(v: Vector, scalar: float):
"""2nd axiom of normed vector spaces: |λv| = |λ| |v|"""
assert isclose((scalar * v).length, fabs(scalar) * v.length)
@given(v=vectors(), w=vectors())
def test_length_triangle(v: Vector, w: Vector):
"""3rd axiom of normed vector spaces: |v+w| = |v| + |w|"""
assert (v + w).length <= v.length + w.length
|
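The new tests above encode the axioms of a norm as Hypothesis properties. A self-contained sketch of the same idea for the triangle inequality, using bare tuples and math.hypot instead of ppb_vector (the bounds and the float-slack tolerance are illustrative assumptions):

import math
from hypothesis import given, strategies as st

# Bounded floats are finite by construction and keep sums from overflowing.
coords = st.floats(min_value=-1e6, max_value=1e6)

def length(v):
    return math.hypot(v[0], v[1])

@given(x1=coords, y1=coords, x2=coords, y2=coords)
def test_triangle_inequality(x1, y1, x2, y2):
    """|v + w| <= |v| + |w|, allowing a little floating-point slack."""
    v, w = (x1, y1), (x2, y2)
    s = (v[0] + w[0], v[1] + w[1])
    assert length(s) <= length(v) + length(w) + 1e-9

test_triangle_inequality()  # hypothesis generates and runs many random cases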
21e03d5f22cc7952bdb12912bd5498755855925a
|
trac/web/__init__.py
|
trac/web/__init__.py
|
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
|
# Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
|
Fix race condition during `mimetypes` initialization.
|
Fix race condition during `mimetypes` initialization.
Initial patch from Steven R. Loomis.
Closes #8629.
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@9740 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
netjunki/trac-Pygit2,jun66j5/trac-ja,jun66j5/trac-ja,netjunki/trac-Pygit2,jun66j5/trac-ja,walty8/trac,walty8/trac,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,walty8/trac
|
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
Fix race condition during `mimetypes` initialization.
Initial patch from Steven R. Loomis.
Closes #8629.
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@9740 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
# Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
|
<commit_before># With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
<commit_msg>Fix race condition during `mimetypes` initialization.
Initial patch from Steven R. Loomis.
Closes #8629.
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@9740 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>
|
# Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
|
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
Fix race condition during `mimetypes` initialization.
Initial patch from Steven R. Loomis.
Closes #8629.
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@9740 af82e41b-90c4-0310-8c96-b1721e28e2e2# Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
|
<commit_before># With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
<commit_msg>Fix race condition during `mimetypes` initialization.
Initial patch from Steven R. Loomis.
Closes #8629.
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@9740 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after># Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
try:
import mod_python.apache
import sys
if 'trac.web.modpython_frontend' in sys.modules:
from trac.web.api import *
except ImportError:
from trac.web.api import *
else:
from trac.web.api import *
|
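The workaround above forces mimetypes.init() once at import time, before any request-handling threads exist, so the racy lazy initialization in the stdlib is never exercised concurrently. Where eager initialization is not an option, a lock-guarded lazy path achieves the same safety; a generic sketch of both patterns:

import threading
import mimetypes

# Pattern 1: eager init at import time (what the commit does).
mimetypes.init()

# Pattern 2: lock-guarded lazy init, for modules whose setup is
# expensive enough that you only want it on first use.
_init_lock = threading.Lock()
_initialized = False

def ensure_initialized():
    global _initialized
    if not _initialized:          # fast path, no lock once set up
        with _init_lock:
            if not _initialized:  # re-check under the lock
                mimetypes.init()
                _initialized = True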
675b860af3576251b32d14b10600f756fd1aebdf
|
tests/chainer_tests/functions_tests/math_tests/test_sqrt.py
|
tests/chainer_tests/functions_tests/math_tests/test_sqrt.py
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 1, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 5, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
|
Use larger values for stable test.
|
Use larger values for stable test.
|
Python
|
mit
|
cupy/cupy,ktnyt/chainer,ronekko/chainer,okuta/chainer,aonotas/chainer,keisuke-umezawa/chainer,jnishi/chainer,tkerola/chainer,jnishi/chainer,cupy/cupy,wkentaro/chainer,pfnet/chainer,okuta/chainer,hvy/chainer,chainer/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,kiyukuta/chainer,wkentaro/chainer,wkentaro/chainer,keisuke-umezawa/chainer,jnishi/chainer,keisuke-umezawa/chainer,ktnyt/chainer,hvy/chainer,okuta/chainer,delta2323/chainer,niboshi/chainer,chainer/chainer,niboshi/chainer,chainer/chainer,cupy/cupy,ktnyt/chainer,cupy/cupy,jnishi/chainer,keisuke-umezawa/chainer,niboshi/chainer,wkentaro/chainer,ktnyt/chainer,hvy/chainer,ysekky/chainer,chainer/chainer,rezoo/chainer,anaruse/chainer,kashif/chainer
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 1, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
Use larger values for stable test.
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 5, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
|
<commit_before>import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 1, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
<commit_msg>Use larger values for stable test.<commit_after>
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 5, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 1, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
Use larger values for stable test.import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 5, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
|
<commit_before>import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 1, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
<commit_msg>Use larger values for stable test.<commit_after>import unittest
import numpy
import chainer.functions as F
from chainer import testing
def make_data(dtype, shape):
x = numpy.random.uniform(0.1, 5, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy
#
# sqrt
@testing.math_function_test(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
#
# rsqrt
def rsqrt(x, dtype=numpy.float32):
return numpy.reciprocal(numpy.sqrt(x, dtype=dtype))
@testing.math_function_test(F.rsqrt, func_expected=rsqrt, make_data=make_data)
class TestRsqrt(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
|
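The reason the wider range [0.1, 5] stabilizes these tests: the derivative of sqrt is 1/(2*sqrt(x)), which grows without bound as x approaches 0, so finite-difference gradient checks get noisy for small inputs. A rough numpy illustration (eps and the sample points are arbitrary choices):

import numpy

def numeric_grad(f, x, eps=1e-3):
    # Central difference approximation of f'(x).
    return (f(x + eps) - f(x - eps)) / (2 * eps)

def analytic_grad_sqrt(x):
    return 1.0 / (2.0 * numpy.sqrt(x))

for x in (0.1, 1.0, 5.0):
    num = numeric_grad(numpy.sqrt, numpy.float32(x))
    ana = analytic_grad_sqrt(x)
    # The relative error shrinks as x moves away from 0, where the
    # derivative blows up and float32 rounding noise dominates.
    print(x, abs(num - ana) / ana)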
052f06b0ef4f3c2befaf0cbbfd605e42553b48da
|
h2o-hadoop-common/tests/python/pyunit_trace.py
|
h2o-hadoop-common/tests/python/pyunit_trace.py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
import os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
assert err is not None
assert str(err.message).startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys, os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
    assert err is not None
    msg = str(err.message)
print("<Error message>")
print(msg)
print("</Error Message>")
# exact message depends on Jetty Version
assert (msg.startswith("HTTP 500") and "TRACE method is not supported" in msg) or \
msg.startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
Fix TRACE test also in rel-yau
|
Fix TRACE test also in rel-yau
|
Python
|
apache-2.0
|
h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
import os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
assert err is not None
assert str(err.message).startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
Fix TRACE test also in rel-yau
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys, os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
    assert err is not None
    msg = str(err.message)
print("<Error message>")
print(msg)
print("</Error Message>")
# exact message depends on Jetty Version
assert (msg.startswith("HTTP 500") and "TRACE method is not supported" in msg) or \
msg.startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
import os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
assert err is not None
assert str(err.message).startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
<commit_msg>Fix TRACE test also in rel-yau<commit_after>
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys, os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
    assert err is not None
    msg = str(err.message)
print("<Error message>")
print(msg)
print("</Error Message>")
# exact message depends on Jetty Version
assert (msg.startswith("HTTP 500") and "TRACE method is not supported" in msg) or \
msg.startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
import os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
assert err is not None
assert str(err.message).startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
Fix TRACE test also in rel-yau#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys, os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
    assert err is not None
    msg = str(err.message)
print("<Error message>")
print(msg)
print("</Error Message>")
# exact message depends on Jetty Version
assert (msg.startswith("HTTP 500") and "TRACE method is not supported" in msg) or \
msg.startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
<commit_before>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
import os
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
assert err is not None
assert str(err.message).startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
<commit_msg>Fix TRACE test also in rel-yau<commit_after>#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys, os
sys.path.insert(1, os.path.join("..", "..", "..", "h2o-py"))
import h2o
from h2o.exceptions import H2OServerError
from tests import pyunit_utils
def trace_request():
err = None
try:
h2o.api("TRACE /")
except H2OServerError as e:
err = e
    assert err is not None
    msg = str(err.message)
print("<Error message>")
print(msg)
print("</Error Message>")
# exact message depends on Jetty Version
assert (msg.startswith("HTTP 500") and "TRACE method is not supported" in msg) or \
msg.startswith("HTTP 405 Method Not Allowed")
if __name__ == "__main__":
pyunit_utils.standalone_test(trace_request)
else:
trace_request()
|
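The fixed test above has to accept either of two server responses because the exact message depends on the Jetty version. One way to keep such either-of assertions readable is a small helper; this sketch is a generic pattern, not part of the h2o test utilities:

def assert_matches_one_of(msg, *predicates):
    """Pass if any predicate accepts the message; report it otherwise."""
    if not any(p(msg) for p in predicates):
        raise AssertionError('unexpected error message: %r' % msg)

# Usage mirroring the test above: accept either Jetty variant.
msg = 'HTTP 500 Server Error: TRACE method is not supported'
assert_matches_one_of(
    msg,
    lambda m: m.startswith('HTTP 500') and 'TRACE method is not supported' in m,
    lambda m: m.startswith('HTTP 405 Method Not Allowed'),
)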
209a8e029e14766027376bb6d8f0b2e0a4a07f1b
|
simulator-perfect.py
|
simulator-perfect.py
|
#!/usr/bin/env python3
import timer
import sys
import utils
# A set of files already in the storage
seen = set()
# The total number of uploads
total_uploads = 0
# The number of files in the storage
files_in = 0
tmr = timer.Timer()
for (hsh, _) in utils.read_upload_stream():
if hsh not in seen:
files_in += 1
seen.add(hsh)
total_uploads += 1
if total_uploads % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, %s" % (
utils.num_fmt(total_uploads),
1 - files_in / total_uploads,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - files_in / total_uploads
print("+++ Simulation complete. dedup_percentage=%f" % dedup_percentage,
file=sys.stderr)
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
    # The size of all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)
|
Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)
|
Python
|
apache-2.0
|
sjakthol/dedup-simulator,sjakthol/dedup-simulator
|
#!/usr/bin/env python3
import timer
import sys
import utils
# A set of files already in the storage
seen = set()
# The total number of uploads
total_uploads = 0
# The number of files in the storage
files_in = 0
tmr = timer.Timer()
for (hsh, _) in utils.read_upload_stream():
if hsh not in seen:
files_in += 1
seen.add(hsh)
total_uploads += 1
if total_uploads % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, %s" % (
utils.num_fmt(total_uploads),
1 - files_in / total_uploads,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - files_in / total_uploads
print("+++ Simulation complete. dedup_percentage=%f" % dedup_percentage,
file=sys.stderr)
Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
    # The size of all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
<commit_before>#!/usr/bin/env python3
import timer
import sys
import utils
# A set of files already in the storage
seen = set()
# The total number of uploads
total_uploads = 0
# The number of files in the storage
files_in = 0
tmr = timer.Timer()
for (hsh, _) in utils.read_upload_stream():
if hsh not in seen:
files_in += 1
seen.add(hsh)
total_uploads += 1
if total_uploads % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, %s" % (
utils.num_fmt(total_uploads),
1 - files_in / total_uploads,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - files_in / total_uploads
print("+++ Simulation complete. dedup_percentage=%f" % dedup_percentage,
file=sys.stderr)
<commit_msg>Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)<commit_after>
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
    # The size of all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
#!/usr/bin/env python3
import timer
import sys
import utils
# A set of files already in the storage
seen = set()
# The total number of uploads
total_uploads = 0
# The number of files in the storage
files_in = 0
tmr = timer.Timer()
for (hsh, _) in utils.read_upload_stream():
if hsh not in seen:
files_in += 1
seen.add(hsh)
total_uploads += 1
if total_uploads % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, %s" % (
utils.num_fmt(total_uploads),
1 - files_in / total_uploads,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - files_in / total_uploads
print("+++ Simulation complete. dedup_percentage=%f" % dedup_percentage,
file=sys.stderr)
Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
    # The size of all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
<commit_before>#!/usr/bin/env python3
import timer
import sys
import utils
# A set of files already in the storage
seen = set()
# The total number of uploads
total_uploads = 0
# The number of files in the storage
files_in = 0
tmr = timer.Timer()
for (hsh, _) in utils.read_upload_stream():
if hsh not in seen:
files_in += 1
seen.add(hsh)
total_uploads += 1
if total_uploads % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, %s" % (
utils.num_fmt(total_uploads),
1 - files_in / total_uploads,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - files_in / total_uploads
print("+++ Simulation complete. dedup_percentage=%f" % dedup_percentage,
file=sys.stderr)
<commit_msg>Modify the perfect simulator to calculate dedup percentages based on file sizes (1 - data_in / data_total)<commit_after>#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
    # The size of all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
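The rewritten simulator weights deduplication by bytes rather than by upload count. The core accounting fits in a few lines; a self-contained sketch with an in-memory upload stream (the sample data is made up):

def dedup_ratio(uploads):
    """uploads: iterable of (content_hash, size_in_bytes) tuples."""
    seen = set()
    total_in = 0   # bytes the clients uploaded
    data_in = 0    # bytes the service actually had to store
    for hsh, size in uploads:
        total_in += size
        if hsh not in seen:
            data_in += size
            seen.add(hsh)
    return 1 - data_in / total_in

# Three uploads of the same 100-byte file plus one unique 50-byte file:
# 150 of 350 bytes stored, so the ratio is 1 - 150/350.
print(dedup_ratio([('a', 100), ('a', 100), ('a', 100), ('b', 50)]))  # ~0.571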
dbe57e9b76194b13d90834163ebe8bf924464dd0
|
src/mcedit2/util/lazyprop.py
|
src/mcedit2/util/lazyprop.py
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
Add a property descriptor for weakref'd members
|
Add a property descriptor for weakref'd members
|
Python
|
bsd-3-clause
|
vorburger/mcedit2,Rubisk/mcedit2,Rubisk/mcedit2,vorburger/mcedit2
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
Add a property descriptor for weakref'd members
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
<commit_before>"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging

log = logging.getLogger(__name__)


def lazyprop(fn):
    """
    Lazily computed property wrapper.

    >>> class Foo(object):
    ...     @lazyprop
    ...     def func(self):
    ...         print("Big computation here!")
    ...         return 42
    >>> f = Foo()
    >>> f.func
    Big computation here!
    42
    >>> f.func
    42
    >>> del f.func
    >>> f.func
    Big computation here!
    42

    :type fn: __builtin__.function
    :return:
    :rtype:
    """
    attr_name = '_lazy_' + fn.__name__

    @property
    def _lazyprop(self):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)

    @_lazyprop.deleter
    def _lazyprop(self):
        if hasattr(self, attr_name):
            delattr(self, attr_name)

    return _lazyprop
<commit_msg>Add a property descriptor for weakref'd members<commit_after>
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
Add a property descriptor for weakref'd members"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref
log = logging.getLogger(__name__)
def lazyprop(fn):
"""
Lazily computed property wrapper.
>>> class Foo(object):
... @lazyprop
... def func(self):
... print("Big computation here!")
... return 42
>>> f = Foo()
>>> f.func
Big computation here!
42
>>> f.func
42
>>> del f.func
>>> f.func
Big computation here!
42
:type fn: __builtin__.function
:return:
:rtype:
"""
attr_name = '_lazy_' + fn.__name__
@property
def _lazyprop(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
@_lazyprop.deleter
def _lazyprop(self):
if hasattr(self, attr_name):
delattr(self, attr_name)
return _lazyprop
class weakrefprop(object):
def __init__(self, name):
self.name = "__weakprop__" + name
def __get__(self, instance, owner):
ref = getattr(instance, self.name, None)
if ref is None:
return None
return ref()
def __set__(self, instance, value):
setattr(instance, self.name, weakref.ref(value))
|
<commit_before>"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging

log = logging.getLogger(__name__)


def lazyprop(fn):
    """
    Lazily computed property wrapper.

    >>> class Foo(object):
    ...     @lazyprop
    ...     def func(self):
    ...         print("Big computation here!")
    ...         return 42
    >>> f = Foo()
    >>> f.func
    Big computation here!
    42
    >>> f.func
    42
    >>> del f.func
    >>> f.func
    Big computation here!
    42

    :type fn: __builtin__.function
    :return:
    :rtype:
    """
    attr_name = '_lazy_' + fn.__name__

    @property
    def _lazyprop(self):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)

    @_lazyprop.deleter
    def _lazyprop(self):
        if hasattr(self, attr_name):
            delattr(self, attr_name)

    return _lazyprop
<commit_msg>Add a property descriptor for weakref'd members<commit_after>"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import weakref

log = logging.getLogger(__name__)


def lazyprop(fn):
    """
    Lazily computed property wrapper.

    >>> class Foo(object):
    ...     @lazyprop
    ...     def func(self):
    ...         print("Big computation here!")
    ...         return 42
    >>> f = Foo()
    >>> f.func
    Big computation here!
    42
    >>> f.func
    42
    >>> del f.func
    >>> f.func
    Big computation here!
    42

    :type fn: __builtin__.function
    :return:
    :rtype:
    """
    attr_name = '_lazy_' + fn.__name__

    @property
    def _lazyprop(self):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)

    @_lazyprop.deleter
    def _lazyprop(self):
        if hasattr(self, attr_name):
            delattr(self, attr_name)

    return _lazyprop


class weakrefprop(object):
    def __init__(self, name):
        self.name = "__weakprop__" + name

    def __get__(self, instance, owner):
        ref = getattr(instance, self.name, None)
        if ref is None:
            return None
        return ref()

    def __set__(self, instance, value):
        setattr(instance, self.name, weakref.ref(value))
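A minimal usage sketch of the weakrefprop descriptor from this record, assuming only the standard library. The Scene and Node names are illustrative placeholders, not taken from the mcedit2 source, and the immediate collection after del relies on CPython's reference counting; other interpreters may reclaim the referent later.

import weakref


class weakrefprop(object):
    # Same descriptor as in the record above: stores a weakref.ref on the
    # owning instance and dereferences it on every attribute read.
    def __init__(self, name):
        self.name = "__weakprop__" + name

    def __get__(self, instance, owner):
        ref = getattr(instance, self.name, None)
        if ref is None:
            return None
        return ref()

    def __set__(self, instance, value):
        setattr(instance, self.name, weakref.ref(value))


class Scene(object):
    pass


class Node(object):
    # A Node can point back at its Scene without keeping the Scene alive,
    # which avoids reference cycles between parent and child objects.
    scene = weakrefprop("scene")


scene = Scene()
node = Node()
node.scene = scene
assert node.scene is scene  # __get__ dereferences the stored weakref

del scene
assert node.scene is None   # referent collected; the descriptor returns None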