commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
850e04a7cf045c11fcc0aef04e37268a0d8e20c6
|
src/container.py
|
src/container.py
|
from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']
|
from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
image_name.replace('/', '-')
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']
|
Replace / with - in image domain names
|
Replace / with - in image domain names
|
Python
|
mit
|
regiontog/macvlan-ipvs-dr,regiontog/macvlan-ipvs-dr
|
from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']Replace / with - in image domain names
|
from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
image_name.replace('/', '-')
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']
|
<commit_before>from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']<commit_msg>Replace / with - in image domain names<commit_after>
|
from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
image_name.replace('/', '-')
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']
|
from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']Replace / with - in image domain namesfrom dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
image_name.replace('/', '-')
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']
|
<commit_before>from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']<commit_msg>Replace / with - in image domain names<commit_after>from dock import client
def fmt(container):
image, name = ns(container)
return '[{image}/{name}]'.format(image=image, name=name)
def ns(container):
image_name = container.attrs['Image']
image = client.images.get(image_name)
if len(image.tags) > 0:
image_name = image.tags[0].split(":")[0]
else:
image_name = image.short_id.split(":")[1]
image_name.replace('/', '-')
return image_name, container.name
def exposed_ports(container):
ports = container.attrs['Config']['ExposedPorts'].keys()
for port in ports:
port, protocol = port.split('/')[0], port.split('/')[1]
yield port, protocol
def exposes_ports(container):
return 'ExposedPorts' in container.attrs['Config']
|
a6e87690e6bc9a22ec21874ef2c821f3a2855212
|
gtrutils.py
|
gtrutils.py
|
""" Utility functions for GTR.
"""
def _get_card_from_zone(self, card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
|
""" Utility functions for GTR.
"""
def _get_card_from_zone(card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
|
Remove self argument from _get_card_from_zone
|
Remove self argument from _get_card_from_zone
|
Python
|
mit
|
mhmurray/cloaca,mhmurray/cloaca,mhmurray/cloaca,mhmurray/cloaca
|
""" Utility functions for GTR.
"""
def _get_card_from_zone(self, card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
Remove self argument from _get_card_from_zone
|
""" Utility functions for GTR.
"""
def _get_card_from_zone(card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
|
<commit_before>""" Utility functions for GTR.
"""
def _get_card_from_zone(self, card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
<commit_msg>Remove self argument from _get_card_from_zone<commit_after>
|
""" Utility functions for GTR.
"""
def _get_card_from_zone(card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
|
""" Utility functions for GTR.
"""
def _get_card_from_zone(self, card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
Remove self argument from _get_card_from_zone""" Utility functions for GTR.
"""
def _get_card_from_zone(card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
|
<commit_before>""" Utility functions for GTR.
"""
def _get_card_from_zone(self, card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
<commit_msg>Remove self argument from _get_card_from_zone<commit_after>""" Utility functions for GTR.
"""
def _get_card_from_zone(card, l):
""" Wrapper around the possible exception caused by trying to
find a non-existent card in a list. Prints an error and
re-raises the exception.
"""
try:
return l.pop(l.index(card))
except ValueError as e:
print 'Error! card {0!s} not found'.format(card)
raise
|
95d86b30d8c5d922bc7ba17d50e5f83eae086e88
|
__init__.py
|
__init__.py
|
"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
Check that each module is valid before trying to import.
|
Check that each module is valid before trying to import.
|
Python
|
mit
|
embaldridge/retriever,bendmorris/retriever,embaldridge/retriever,goelakash/retriever,henrykironde/deletedret,goelakash/retriever,davharris/retriever,davharris/retriever,embaldridge/retriever,henrykironde/deletedret,davharris/retriever,bendmorris/retriever,bendmorris/retriever
|
"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
Check that each module is valid before trying to import.
|
"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
<commit_before>"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
<commit_msg>Check that each module is valid before trying to import.<commit_after>
|
"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
Check that each module is valid before trying to import."""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
<commit_before>"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
<commit_msg>Check that each module is valid before trying to import.<commit_after>"""Database Toolkit
This package contains a framework for creating and running scripts designed to
download published ecological data, and store the data in a database.
"""
import os
import imp
VERSION = '0.4.1'
REPOSITORY = 'http://www.ecologicaldata.org/dbtk/'
def MODULE_LIST():
"""Load scripts from scripts directory and return list of modules."""
files = [file for file in os.listdir("scripts")
if file[-3:] == ".py" and file[0] != "_"]
modules = []
for script in files:
script_name = '.'.join(script.split('.')[:-1])
file, pathname, desc = imp.find_module(script_name, ["scripts"])
try:
new_module = imp.load_module(script_name, file, pathname, desc)
new_module.SCRIPT
modules.append(new_module)
except:
pass
return modules
def DBTK_LIST():
return [module.SCRIPT for module in MODULE_LIST()]
def ENGINE_LIST():
engines = [
"mysql",
"postgres",
"sqlite",
"msaccess",
]
ENGINE_MODULE_LIST = [
__import__("dbtk.engines." + module, fromlist="engines")
for module in engines
]
return [module.engine() for module in ENGINE_MODULE_LIST]
|
6606467733d9bb6a5c59e9fb72946303cb0b8693
|
src/foremast/dns/__main__.py
|
src/foremast/dns/__main__.py
|
"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="The environment to create the security group",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
|
"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="Subnetnet type, e.g. external, internal",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
|
Use better help for --elb-subnet
|
fix: Use better help for --elb-subnet
See also: PSOBAT-1359
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="The environment to create the security group",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
fix: Use better help for --elb-subnet
See also: PSOBAT-1359
|
"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="Subnetnet type, e.g. external, internal",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
|
<commit_before>"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="The environment to create the security group",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
<commit_msg>fix: Use better help for --elb-subnet
See also: PSOBAT-1359<commit_after>
|
"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="Subnetnet type, e.g. external, internal",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
|
"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="The environment to create the security group",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
fix: Use better help for --elb-subnet
See also: PSOBAT-1359"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="Subnetnet type, e.g. external, internal",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
|
<commit_before>"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="The environment to create the security group",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
<commit_msg>fix: Use better help for --elb-subnet
See also: PSOBAT-1359<commit_after>"""Create DNS record."""
import argparse
import logging
from ..args import add_debug
from ..consts import LOGGING_FORMAT
from .create_dns import SpinnakerDns
def main():
"""Run newer stuffs."""
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
parser.add_argument("--app",
help="The application name to create",
required=True)
parser.add_argument("--region",
help="The region to create the security group",
required=True)
parser.add_argument("--env",
help="The environment to create the security group",
required=True)
parser.add_argument("--elb-subnet",
help="Subnetnet type, e.g. external, internal",
required=True)
args = parser.parse_args()
logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
log.debug('Parsed arguments: %s', args)
# Dictionary containing application info. This is passed to the class for processing
appinfo = {
'app': args.app,
'region': args.region,
'env': args.env,
'elb_subnet': args.elb_subnet
}
spinnakerapps = SpinnakerDns(app_info=appinfo)
spinnakerapps.create_elb_dns()
if __name__ == "__main__":
main()
|
c4bc4b37b991f428ecfc730d7f8030d8ea52050c
|
src/protocol/caldav/definitions/csxml.py
|
src/protocol/caldav/definitions/csxml.py
|
##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
|
##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
pushkey = QName(CSNamespace, "pushkey")
|
Define the pushkey attribute for XMPP push
|
Define the pushkey attribute for XMPP push
git-svn-id: b8a2ed21f1aafe1ee9fc65e616c668cc51cd004a@7731 e27351fd-9f3e-4f54-a53b-843176b1656c
|
Python
|
apache-2.0
|
skarra/CalDAVClientLibrary,skarra/CalDAVClientLibrary
|
##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
Define the pushkey attribute for XMPP push
git-svn-id: b8a2ed21f1aafe1ee9fc65e616c668cc51cd004a@7731 e27351fd-9f3e-4f54-a53b-843176b1656c
|
##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
pushkey = QName(CSNamespace, "pushkey")
|
<commit_before>##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
<commit_msg>Define the pushkey attribute for XMPP push
git-svn-id: b8a2ed21f1aafe1ee9fc65e616c668cc51cd004a@7731 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>
|
##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
pushkey = QName(CSNamespace, "pushkey")
|
##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
Define the pushkey attribute for XMPP push
git-svn-id: b8a2ed21f1aafe1ee9fc65e616c668cc51cd004a@7731 e27351fd-9f3e-4f54-a53b-843176b1656c##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
pushkey = QName(CSNamespace, "pushkey")
|
<commit_before>##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
<commit_msg>Define the pushkey attribute for XMPP push
git-svn-id: b8a2ed21f1aafe1ee9fc65e616c668cc51cd004a@7731 e27351fd-9f3e-4f54-a53b-843176b1656c<commit_after>##
# Copyright (c) 2007-2009 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from xml.etree.ElementTree import QName
CSNamespace = "http://calendarserver.org/ns/"
calendar_proxy_read_for = QName(CSNamespace, "calendar-proxy-read-for")
calendar_proxy_write_for = QName(CSNamespace, "calendar-proxy-write-for")
getctag = QName(CSNamespace, "getctag")
notification = QName(CSNamespace, "notification")
notification_URL = QName(CSNamespace, "notification-URL")
# Are these really in this namespace?
dropbox_home = QName(CSNamespace, "dropbox-home")
dropbox_home_URL = QName(CSNamespace, "dropbox-home-URL")
# Defined by caldav-pubsubdiscovery
xmpp_server = QName(CSNamespace, "xmpp-server")
xmpp_uri = QName(CSNamespace, "xmpp-uri")
pushkey = QName(CSNamespace, "pushkey")
|
a4656e18539950c0de0aea08eadf88f841ef24ea
|
scripts/get_bump_version.py
|
scripts/get_bump_version.py
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
|
from __future__ import print_function
import subprocess
import sys
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
|
Make sure sys is available for sys.exit() call on failure
|
Make sure sys is available for sys.exit() call on failure
|
Python
|
bsd-3-clause
|
dennisobrien/bokeh,stuart-knock/bokeh,mutirri/bokeh,canavandl/bokeh,daodaoliang/bokeh,abele/bokeh,jakirkham/bokeh,stonebig/bokeh,abele/bokeh,philippjfr/bokeh,rs2/bokeh,rs2/bokeh,aavanian/bokeh,birdsarah/bokeh,srinathv/bokeh,bokeh/bokeh,PythonCharmers/bokeh,paultcochrane/bokeh,gpfreitas/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,rothnic/bokeh,msarahan/bokeh,gpfreitas/bokeh,aiguofer/bokeh,almarklein/bokeh,caseyclements/bokeh,evidation-health/bokeh,stuart-knock/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,eteq/bokeh,phobson/bokeh,CrazyGuo/bokeh,mutirri/bokeh,timothydmorton/bokeh,DuCorey/bokeh,mutirri/bokeh,awanke/bokeh,schoolie/bokeh,ericdill/bokeh,azjps/bokeh,ChristosChristofidis/bokeh,timsnyder/bokeh,roxyboy/bokeh,eteq/bokeh,Karel-van-de-Plassche/bokeh,deeplook/bokeh,lukebarnard1/bokeh,dennisobrien/bokeh,timsnyder/bokeh,ChinaQuants/bokeh,caseyclements/bokeh,khkaminska/bokeh,PythonCharmers/bokeh,ptitjano/bokeh,jplourenco/bokeh,paultcochrane/bokeh,phobson/bokeh,bokeh/bokeh,ericdill/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,xguse/bokeh,mindriot101/bokeh,josherick/bokeh,caseyclements/bokeh,saifrahmed/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,deeplook/bokeh,azjps/bokeh,ericmjl/bokeh,daodaoliang/bokeh,ChinaQuants/bokeh,awanke/bokeh,ericmjl/bokeh,CrazyGuo/bokeh,htygithub/bokeh,alan-unravel/bokeh,alan-unravel/bokeh,abele/bokeh,tacaswell/bokeh,saifrahmed/bokeh,alan-unravel/bokeh,jakirkham/bokeh,timothydmorton/bokeh,ptitjano/bokeh,caseyclements/bokeh,rhiever/bokeh,KasperPRasmussen/bokeh,phobson/bokeh,DuCorey/bokeh,schoolie/bokeh,percyfal/bokeh,DuCorey/bokeh,ahmadia/bokeh,schoolie/bokeh,saifrahmed/bokeh,ericdill/bokeh,justacec/bokeh,azjps/bokeh,canavandl/bokeh,laurent-george/bokeh,eteq/bokeh,htygithub/bokeh,philippjfr/bokeh,clairetang6/bokeh,roxyboy/bokeh,birdsarah/bokeh,schoolie/bokeh,percyfal/bokeh,evidation-health/bokeh,matbra/bokeh,matbra/bokeh,rs2/bokeh,htygithub/bokeh,ChinaQuants/bokeh,jplourenco/bokeh,ptitjano/bokeh,muku42/bokeh,timothydmorton/bokeh,Crazy
Guo/bokeh,carlvlewis/bokeh,xguse/bokeh,Karel-van-de-Plassche/bokeh,awanke/bokeh,quasiben/bokeh,rothnic/bokeh,xguse/bokeh,muku42/bokeh,rothnic/bokeh,xguse/bokeh,jakirkham/bokeh,alan-unravel/bokeh,roxyboy/bokeh,stuart-knock/bokeh,muku42/bokeh,quasiben/bokeh,phobson/bokeh,aiguofer/bokeh,philippjfr/bokeh,satishgoda/bokeh,PythonCharmers/bokeh,deeplook/bokeh,almarklein/bokeh,dennisobrien/bokeh,bsipocz/bokeh,matbra/bokeh,akloster/bokeh,ptitjano/bokeh,ericdill/bokeh,mindriot101/bokeh,carlvlewis/bokeh,jplourenco/bokeh,khkaminska/bokeh,laurent-george/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,birdsarah/bokeh,ChinaQuants/bokeh,mindriot101/bokeh,awanke/bokeh,timsnyder/bokeh,clairetang6/bokeh,bokeh/bokeh,rs2/bokeh,clairetang6/bokeh,rhiever/bokeh,josherick/bokeh,rhiever/bokeh,bsipocz/bokeh,CrazyGuo/bokeh,draperjames/bokeh,carlvlewis/bokeh,saifrahmed/bokeh,rhiever/bokeh,htygithub/bokeh,srinathv/bokeh,maxalbert/bokeh,percyfal/bokeh,mindriot101/bokeh,ericmjl/bokeh,draperjames/bokeh,maxalbert/bokeh,ericmjl/bokeh,bsipocz/bokeh,matbra/bokeh,gpfreitas/bokeh,azjps/bokeh,quasiben/bokeh,draperjames/bokeh,eteq/bokeh,draperjames/bokeh,jakirkham/bokeh,stonebig/bokeh,timothydmorton/bokeh,tacaswell/bokeh,laurent-george/bokeh,ahmadia/bokeh,Karel-van-de-Plassche/bokeh,ahmadia/bokeh,carlvlewis/bokeh,jplourenco/bokeh,maxalbert/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,maxalbert/bokeh,msarahan/bokeh,birdsarah/bokeh,rothnic/bokeh,philippjfr/bokeh,mutirri/bokeh,canavandl/bokeh,abele/bokeh,khkaminska/bokeh,timsnyder/bokeh,aiguofer/bokeh,bokeh/bokeh,tacaswell/bokeh,msarahan/bokeh,josherick/bokeh,daodaoliang/bokeh,ericmjl/bokeh,aavanian/bokeh,aiguofer/bokeh,schoolie/bokeh,akloster/bokeh,lukebarnard1/bokeh,satishgoda/bokeh,canavandl/bokeh,jakirkham/bokeh,philippjfr/bokeh,aavanian/bokeh,stonebig/bokeh,msarahan/bokeh,PythonCharmers/bokeh,justacec/bokeh,phobson/bokeh,aiguofer/bokeh,dennisobrien/bokeh,ChristosChristofidis/bokeh,bokeh/bokeh,paultcochrane/bokeh,stonebig/bokeh,srinathv/bokeh,satishgoda/bokeh,evi
dation-health/bokeh,akloster/bokeh,ahmadia/bokeh,clairetang6/bokeh,DuCorey/bokeh,deeplook/bokeh,laurent-george/bokeh,muku42/bokeh,stuart-knock/bokeh,satishgoda/bokeh,josherick/bokeh,lukebarnard1/bokeh,daodaoliang/bokeh,khkaminska/bokeh,timsnyder/bokeh,paultcochrane/bokeh,percyfal/bokeh,rs2/bokeh,tacaswell/bokeh,akloster/bokeh,ChristosChristofidis/bokeh,aavanian/bokeh,evidation-health/bokeh,lukebarnard1/bokeh,ChristosChristofidis/bokeh,bsipocz/bokeh,justacec/bokeh,almarklein/bokeh,srinathv/bokeh,KasperPRasmussen/bokeh,ptitjano/bokeh,roxyboy/bokeh,azjps/bokeh
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
Make sure sys is available for sys.exit() call on failure
|
from __future__ import print_function
import subprocess
import sys
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
|
<commit_before>from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
<commit_msg>Make sure sys is available for sys.exit() call on failure<commit_after>
|
from __future__ import print_function
import subprocess
import sys
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
Make sure sys is available for sys.exit() call on failurefrom __future__ import print_function
import subprocess
import sys
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
|
<commit_before>from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
<commit_msg>Make sure sys is available for sys.exit() call on failure<commit_after>from __future__ import print_function
import subprocess
import sys
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, mod = version.split("-")[:2]
except ValueError:
vers, mod = version, ""
return vers, mod
vers, mod = get_version_from_git()
vals = vers.split('.')
if not mod.startswith('rc'):
#check for X.X and increment to X.X.1
if len(vals) < 3:
new_ver = '.'.join(vals) + '.1'
print(new_ver)
else:
new_val = int(vals[-1]) + 1
new_val = str(new_val)
vals[-1] = new_val
new_ver = '.'.join(vals)
print(new_ver)
else:
new_ver = vers + '-' + mod
print(new_ver)
|
5000ed8fa0426a7968a0db4a89d221ef800a2da7
|
wordsegmenterTC/__init__.py
|
wordsegmenterTC/__init__.py
|
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
|
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
|
Fix str index of range in some case
|
Fix str index of range in some case
|
Python
|
mit
|
tchayintr/wordsegmenterTC
|
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
Fix str index of range in some case
|
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
|
<commit_before>import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
<commit_msg>Fix str index of range in some case<commit_after>
|
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
|
import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
Fix str index of range in some caseimport PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
|
<commit_before>import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except StopIteration:
pass
return retText
<commit_msg>Fix str index of range in some case<commit_after>import PyICU
SEPARATER = " "
class Segmenter:
def isThai(self, chr):
cVal = ord(chr)
if (cVal >= 3584 and cVal <= 3711):
return True
return False
def segment(self, text):
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(text)
lastPos = bd.first()
retText = ""
try:
while(True):
currentPos = next(bd)
retText += text[lastPos:currentPos]
try:
if (self.isThai(text[currentPos - 1])):
if (currentPos < len(text)):
if (self.isThai(text[currentPos])):
# Separater
retText += SEPARATER
lastPos = currentPos
except:
pass
except StopIteration:
pass
return retText
|
81ddb6a5242db63bc7e5cf10f0419d339404cec3
|
DebianChangesBot/formatters/upload_accepted.py
|
DebianChangesBot/formatters/upload_accepted.py
|
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset] " % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += "(Closes: %s) " % bug_list
msg += "http://packages.qa.debian.org/%s" % self.package
return msg
|
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset]" % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += " (Closes: %s)" % bug_list
msg += ". http://packages.qa.debian.org/%s" % self.package
return msg
|
Fix additional fullstop on accepted upload formatter
|
Fix additional fullstop on accepted upload formatter
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
Python
|
agpl-3.0
|
lamby/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,xtaran/debian-devel-changes-bot
|
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset] " % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += "(Closes: %s) " % bug_list
msg += "http://packages.qa.debian.org/%s" % self.package
return msg
Fix additional fullstop on accepted upload formatter
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset]" % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += " (Closes: %s)" % bug_list
msg += ". http://packages.qa.debian.org/%s" % self.package
return msg
|
<commit_before>
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset] " % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += "(Closes: %s) " % bug_list
msg += "http://packages.qa.debian.org/%s" % self.package
return msg
<commit_msg>Fix additional fullstop on accepted upload formatter
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>
|
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset]" % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += " (Closes: %s)" % bug_list
msg += ". http://packages.qa.debian.org/%s" % self.package
return msg
|
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset] " % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += "(Closes: %s) " % bug_list
msg += "http://packages.qa.debian.org/%s" % self.package
return msg
Fix additional fullstop on accepted upload formatter
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset]" % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += " (Closes: %s)" % bug_list
msg += ". http://packages.qa.debian.org/%s" % self.package
return msg
|
<commit_before>
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset] " % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += "(Closes: %s) " % bug_list
msg += "http://packages.qa.debian.org/%s" % self.package
return msg
<commit_msg>Fix additional fullstop on accepted upload formatter
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>
from DebianChangesBot import Formatter
from DebianChangesBot.datasources import NewQueue
class UploadAcceptedFormatter(Formatter):
FIELDS = ('package', 'version', 'distribution', 'urgency', 'by')
OPTIONAL = ('closes',)
def format(self):
msg = "[green]%s[reset] "
if NewQueue().is_new(self.package):
msg += "[red](NEW)[reset] "
msg += "[yellow]%s[reset]) uploaded " % self.version
if self.distribution != 'unstable':
msg += "to [blue]%s[reset] "
if self.urgency != 'low':
msg += "with urgency [red]%s[reset]" % self.urgency
msg += "by [cyan]%s[reset]" % self.format_email_address(self.by)
if self.closes and '-backports' not in self.distribution:
bug_list = ', '.join(["[b]#%s[/b]" % x for x in self.closes.split(' ')])
msg += " (Closes: %s)" % bug_list
msg += ". http://packages.qa.debian.org/%s" % self.package
return msg
|
12327b28697e3d27a6f92863091a84d9b56c0eec
|
openrcv/test/test_datagen.py
|
openrcv/test/test_datagen.py
|
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0], [1, 1]),
# Check terminating the list early.
(([1, 2], ), [2], []),
# TODO: more.
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
|
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check terminating the list early.
(([1, 2], ), [2], []),
# Check that duplications are allowed.
(([1, 2], ), [0, 0], [1, 1]),
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0, 0], [1, 1]),
# Check that max_length is respected.
(([1, 2], 3), [0, 0, 0, 0], [1, 1, 1]),
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
|
Add more datagen test cases.
|
Add more datagen test cases.
|
Python
|
mit
|
cjerdonek/open-rcv,cjerdonek/open-rcv
|
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0], [1, 1]),
# Check terminating the list early.
(([1, 2], ), [2], []),
# TODO: more.
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
Add more datagen test cases.
|
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check terminating the list early.
(([1, 2], ), [2], []),
# Check that duplications are allowed.
(([1, 2], ), [0, 0], [1, 1]),
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0, 0], [1, 1]),
# Check that max_length is respected.
(([1, 2], 3), [0, 0, 0, 0], [1, 1, 1]),
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
|
<commit_before>
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0], [1, 1]),
# Check terminating the list early.
(([1, 2], ), [2], []),
# TODO: more.
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
<commit_msg>Add more datagen test cases.<commit_after>
|
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check terminating the list early.
(([1, 2], ), [2], []),
# Check that duplications are allowed.
(([1, 2], ), [0, 0], [1, 1]),
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0, 0], [1, 1]),
# Check that max_length is respected.
(([1, 2], 3), [0, 0, 0, 0], [1, 1, 1]),
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
|
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0], [1, 1]),
# Check terminating the list early.
(([1, 2], ), [2], []),
# TODO: more.
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
Add more datagen test cases.
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check terminating the list early.
(([1, 2], ), [2], []),
# Check that duplications are allowed.
(([1, 2], ), [0, 0], [1, 1]),
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0, 0], [1, 1]),
# Check that max_length is respected.
(([1, 2], 3), [0, 0, 0, 0], [1, 1, 1]),
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
|
<commit_before>
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0], [1, 1]),
# Check terminating the list early.
(([1, 2], ), [2], []),
# TODO: more.
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
<commit_msg>Add more datagen test cases.<commit_after>
from unittest import TestCase
from unittest.mock import patch
from openrcv.datagen import gen_random_list
class ModuleTest(TestCase):
def make_randint(self, values):
values = iter(values)
def randint(*args):
try:
return next(values)
except StopIteration: # pragma: no cover
raise Exception("to fix this, pass in more values for your test")
return randint
def test_gen_random_list(self):
cases = (
# Check terminating the list early.
(([1, 2], ), [2], []),
# Check that duplications are allowed.
(([1, 2], ), [0, 0], [1, 1]),
# Check that max_length defaults to the number of choices.
(([1, 2], ), [0, 0, 0, 0], [1, 1]),
# Check that max_length is respected.
(([1, 2], 3), [0, 0, 0, 0], [1, 1, 1]),
)
for args, randint_vals, expected in cases:
with self.subTest(args=args, expected=expected, randint_vals=randint_vals):
randint = self.make_randint(randint_vals)
with patch('random.randint', randint):
self.assertEqual(gen_random_list(*args), expected)
|
cb78ebc617c5ac8370321a10bd7f6ee418a77b7e
|
grade-school/grade_school.py
|
grade-school/grade_school.py
|
# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
self.students = students[]
|
# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
students = {}
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
def add(self, student, grade):
for student, grade in students.items():
for s, g in values.items():
|
Add student name and grade
|
Add student name and grade
|
Python
|
mit
|
amalshehu/exercism-python
|
# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
self.students = students[]
Add student name and grade
|
# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
students = {}
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
def add(self, student, grade):
for student, grade in students.items():
for s, g in values.items():
|
<commit_before># File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
self.students = students[]
<commit_msg>Add student name and grade<commit_after>
|
# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
students = {}
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
def add(self, student, grade):
for student, grade in students.items():
for s, g in values.items():
|
# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
self.students = students[]
Add student name and grade# File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
students = {}
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
def add(self, student, grade):
for student, grade in students.items():
for s, g in values.items():
|
<commit_before># File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
self.students = students[]
<commit_msg>Add student name and grade<commit_after># File: grade_school.py
# Purpose: Write a small archiving program that stores students' names along with the grade that they are in.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 12th September 2016, 11:00 PM
class School(object):
"""docstring for School."""
students = {}
def __init__(self, school, grade, student):
self.school = school
self.grade = grade
self.student = student
def add(self, student, grade):
for student, grade in students.items():
for s, g in values.items():
|
3389c6208a86d4ec7ba9594e6f0f57f082d81882
|
gitfs/views/history_index.py
|
gitfs/views/history_index.py
|
from .view import View
from errno import ENOENT
from stat import S_IFDIR
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def readdir(self, path, fh):
return ['.', '..', 'commits everywhere']
|
from datetime import datetime
from errno import ENOENT
from stat import S_IFDIR
from pygit2 import GIT_SORT_TIME
from .view import View
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def _get_commit_history(self):
"""
Walk through all commits from current repo in order to compose the
_history_ directory.
"""
paths = {}
for commit in self.repo.walk(self.repo.head.target, GIT_SORT_TIME):
commit_time = datetime.fromtimestamp(commit.commit_time)
day = "%s-%s-%s" % (commit_time.year, commit_time.month,
commit_time.day)
time = "%s-%s-%s" % (commit_time.hour, commit_time.minute,
commit_time.second)
paths[day] = "%s-%s" % (time, commit.hex[:7])
#paths[day] = "%s-%s" % (time, commit.hex)
return paths
def readdir(self, path, fh):
commit_hist = self._get_commit_history()
dir_entries = ['.', '..'] + commit_hist.keys()
for entry in dir_entries:
yield entry
|
Update HistoryIndeView - listdir is working.
|
Update HistoryIndeView - listdir is working.
|
Python
|
apache-2.0
|
PressLabs/gitfs,rowhit/gitfs,bussiere/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs
|
from .view import View
from errno import ENOENT
from stat import S_IFDIR
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def readdir(self, path, fh):
return ['.', '..', 'commits everywhere']
Update HistoryIndeView - listdir is working.
|
from datetime import datetime
from errno import ENOENT
from stat import S_IFDIR
from pygit2 import GIT_SORT_TIME
from .view import View
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def _get_commit_history(self):
"""
Walk through all commits from current repo in order to compose the
_history_ directory.
"""
paths = {}
for commit in self.repo.walk(self.repo.head.target, GIT_SORT_TIME):
commit_time = datetime.fromtimestamp(commit.commit_time)
day = "%s-%s-%s" % (commit_time.year, commit_time.month,
commit_time.day)
time = "%s-%s-%s" % (commit_time.hour, commit_time.minute,
commit_time.second)
paths[day] = "%s-%s" % (time, commit.hex[:7])
#paths[day] = "%s-%s" % (time, commit.hex)
return paths
def readdir(self, path, fh):
commit_hist = self._get_commit_history()
dir_entries = ['.', '..'] + commit_hist.keys()
for entry in dir_entries:
yield entry
|
<commit_before>from .view import View
from errno import ENOENT
from stat import S_IFDIR
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def readdir(self, path, fh):
return ['.', '..', 'commits everywhere']
<commit_msg>Update HistoryIndeView - listdir is working.<commit_after>
|
from datetime import datetime
from errno import ENOENT
from stat import S_IFDIR
from pygit2 import GIT_SORT_TIME
from .view import View
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def _get_commit_history(self):
"""
Walk through all commits from current repo in order to compose the
_history_ directory.
"""
paths = {}
for commit in self.repo.walk(self.repo.head.target, GIT_SORT_TIME):
commit_time = datetime.fromtimestamp(commit.commit_time)
day = "%s-%s-%s" % (commit_time.year, commit_time.month,
commit_time.day)
time = "%s-%s-%s" % (commit_time.hour, commit_time.minute,
commit_time.second)
paths[day] = "%s-%s" % (time, commit.hex[:7])
#paths[day] = "%s-%s" % (time, commit.hex)
return paths
def readdir(self, path, fh):
commit_hist = self._get_commit_history()
dir_entries = ['.', '..'] + commit_hist.keys()
for entry in dir_entries:
yield entry
|
from .view import View
from errno import ENOENT
from stat import S_IFDIR
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def readdir(self, path, fh):
return ['.', '..', 'commits everywhere']
Update HistoryIndeView - listdir is working.
from datetime import datetime
from errno import ENOENT
from stat import S_IFDIR
from pygit2 import GIT_SORT_TIME
from .view import View
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def _get_commit_history(self):
"""
Walk through all commits from current repo in order to compose the
_history_ directory.
"""
paths = {}
for commit in self.repo.walk(self.repo.head.target, GIT_SORT_TIME):
commit_time = datetime.fromtimestamp(commit.commit_time)
day = "%s-%s-%s" % (commit_time.year, commit_time.month,
commit_time.day)
time = "%s-%s-%s" % (commit_time.hour, commit_time.minute,
commit_time.second)
paths[day] = "%s-%s" % (time, commit.hex[:7])
#paths[day] = "%s-%s" % (time, commit.hex)
return paths
def readdir(self, path, fh):
commit_hist = self._get_commit_history()
dir_entries = ['.', '..'] + commit_hist.keys()
for entry in dir_entries:
yield entry
|
<commit_before>from .view import View
from errno import ENOENT
from stat import S_IFDIR
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def readdir(self, path, fh):
return ['.', '..', 'commits everywhere']
<commit_msg>Update HistoryIndeView - listdir is working.<commit_after>
from datetime import datetime
from errno import ENOENT
from stat import S_IFDIR
from pygit2 import GIT_SORT_TIME
from .view import View
from gitfs import FuseMethodNotImplemented, FuseOSError
from log import log
class HistoryIndexView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path != '/':
raise FuseOSError(ENOENT)
return dict(st_mode=(S_IFDIR | 0755), st_nlink=2)
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
log.info('%s %s', path, amode)
return 0
def _get_commit_history(self):
"""
Walk through all commits from current repo in order to compose the
_history_ directory.
"""
paths = {}
for commit in self.repo.walk(self.repo.head.target, GIT_SORT_TIME):
commit_time = datetime.fromtimestamp(commit.commit_time)
day = "%s-%s-%s" % (commit_time.year, commit_time.month,
commit_time.day)
time = "%s-%s-%s" % (commit_time.hour, commit_time.minute,
commit_time.second)
paths[day] = "%s-%s" % (time, commit.hex[:7])
#paths[day] = "%s-%s" % (time, commit.hex)
return paths
def readdir(self, path, fh):
commit_hist = self._get_commit_history()
dir_entries = ['.', '..'] + commit_hist.keys()
for entry in dir_entries:
yield entry
|
5150c61929271167556e1e337de1db573a5719ea
|
tests/unittests/framework_file_server_tests.py
|
tests/unittests/framework_file_server_tests.py
|
from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
|
from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
# db.insert = Mock(side
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
def test_AddShouldCallInsertOnDatabase(self):
fs = file_server.FileServer()
db = main.sessionDB()
db.insert = Mock(return_value=None)
fs.add(db, "This is file data", "myapp")
self.assertEqual(db.insert.call_count, 1)
|
Add test that db.insert is called when add is called
|
Add test that db.insert is called when add is called
|
Python
|
agpl-3.0
|
localprojects/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,codeforeurope/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us
|
from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
Add test that db.insert is called when add is called
|
from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
# db.insert = Mock(side
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
def test_AddShouldCallInsertOnDatabase(self):
fs = file_server.FileServer()
db = main.sessionDB()
db.insert = Mock(return_value=None)
fs.add(db, "This is file data", "myapp")
self.assertEqual(db.insert.call_count, 1)
|
<commit_before>from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
<commit_msg>Add test that db.insert is called when add is called<commit_after>
|
from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
# db.insert = Mock(side
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
def test_AddShouldCallInsertOnDatabase(self):
fs = file_server.FileServer()
db = main.sessionDB()
db.insert = Mock(return_value=None)
fs.add(db, "This is file data", "myapp")
self.assertEqual(db.insert.call_count, 1)
|
from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
Add test that db.insert is called when add is calledfrom unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
# db.insert = Mock(side
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
def test_AddShouldCallInsertOnDatabase(self):
fs = file_server.FileServer()
db = main.sessionDB()
db.insert = Mock(return_value=None)
fs.add(db, "This is file data", "myapp")
self.assertEqual(db.insert.call_count, 1)
|
<commit_before>from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
<commit_msg>Add test that db.insert is called when add is called<commit_after>from unittest import TestCase
from lib import web
from mock import Mock
import main
import framework.file_server as file_server
class BaseFileServerTests (TestCase):
def test_AddShouldReturnNoneIfDbInsertionFails(self):
fs = file_server.FileServer()
fs._addDbRecord = Mock(return_value=None)
fs._saveFile = Mock()
db = main.sessionDB()
# db.insert = Mock(side
id = fs.add(db, "This is file data", "myapp")
self.assertEqual(fs._addDbRecord.call_count, 1)
self.assertFalse(fs._saveFile.called)
self.assertIsNone(id)
def test_AddShouldCallInsertOnDatabase(self):
fs = file_server.FileServer()
db = main.sessionDB()
db.insert = Mock(return_value=None)
fs.add(db, "This is file data", "myapp")
self.assertEqual(db.insert.call_count, 1)
|
f849961e75dc956d669813fddb5b13627b224e1e
|
pyang/plugins/name.py
|
pyang/plugins/name.py
|
"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
Use i_latest_revision to ensure we get the latest revision.
|
Use i_latest_revision to ensure we get the latest revision.
|
Python
|
isc
|
mbj4668/pyang,mbj4668/pyang
|
"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
Use i_latest_revision to ensure we get the latest revision.
|
"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
<commit_before>"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
<commit_msg>Use i_latest_revision to ensure we get the latest revision.<commit_after>
|
"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
Use i_latest_revision to ensure we get the latest revision."""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
<commit_before>"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
r = module.search_one('revision')
if r is not None:
rstr = '@%s' % r.arg
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
<commit_msg>Use i_latest_revision to ensure we get the latest revision.<commit_after>"""Name output plugin
"""
import optparse
from pyang import plugin
def pyang_plugin_init():
plugin.register_plugin(NamePlugin())
class NamePlugin(plugin.PyangPlugin):
def add_output_format(self, fmts):
self.multiple_modules = True
fmts['name'] = self
def add_opts(self, optparser):
optlist = [
optparse.make_option("--name-print-revision",
dest="print_revision",
action="store_true",
help="Print the name and revision in name@revision format"),
]
g = optparser.add_option_group("Name output specific options")
g.add_options(optlist)
def setup_fmt(self, ctx):
ctx.implicit_errors = False
def emit(self, ctx, modules, fd):
emit_name(ctx, modules, fd)
def emit_name(ctx, modules, fd):
for module in modules:
bstr = ""
rstr = ""
if ctx.opts.print_revision:
rs = module.i_latest_revision
if rs is None:
r = module.search_one('revision')
if r is not None:
rs = r.arg
if rs is not None:
rstr = '@%s' % rs
b = module.search_one('belongs-to')
if b is not None:
bstr = " (belongs-to %s)" % b.arg
fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
|
2a0dafb41a0e033f58848fd8e325b96466b924b5
|
gamecraft/wsgi.py
|
gamecraft/wsgi.py
|
"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings_docker")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Tweak WSGI module to use docker settings
|
Tweak WSGI module to use docker settings
|
Python
|
mit
|
micktwomey/gamecraft-mk-iii,micktwomey/gamecraft-mk-iii,micktwomey/gamecraft-mk-iii,micktwomey/gamecraft-mk-iii
|
"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Tweak WSGI module to use docker settings
|
"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings_docker")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
<commit_before>"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
<commit_msg>Tweak WSGI module to use docker settings<commit_after>
|
"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings_docker")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Tweak WSGI module to use docker settings"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings_docker")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
<commit_before>"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
<commit_msg>Tweak WSGI module to use docker settings<commit_after>"""
WSGI config for gamecraft project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gamecraft.settings_docker")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
8707b835d34380f737e7954c7bac527c916b2a7c
|
tests/test_special_features.py
|
tests/test_special_features.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two two one ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan too wan ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un to un un un ")
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
self.addTypeEqualityFunc(str,self.assertSameWords)
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two one")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un")
def assertSameWords(self, tested, reference, msg=None):
set_tested = set(tested.split())
set_reference = set(reference.split())
if set_tested != set_reference:
raise self.failureException(msg if msg is not None else "%r doesn't consist of the same words as %r" % (tested, reference))
if __name__ == "__main__":
unittest.main()
|
Test ignoring duplicates and order
|
Test ignoring duplicates and order
|
Python
|
bsd-3-clause
|
arvindch/speech_recognition,arvindch/speech_recognition,Uberi/speech_recognition,Uberi/speech_recognition
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two two one ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan too wan ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un to un un un ")
if __name__ == "__main__":
unittest.main()
Test ignoring duplicates and order
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
self.addTypeEqualityFunc(str,self.assertSameWords)
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two one")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un")
def assertSameWords(self, tested, reference, msg=None):
set_tested = set(tested.split())
set_reference = set(reference.split())
if set_tested != set_reference:
raise self.failureException(msg if msg is not None else "%r doesn't consist of the same words as %r" % (tested, reference))
if __name__ == "__main__":
unittest.main()
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two two one ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan too wan ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un to un un un ")
if __name__ == "__main__":
unittest.main()
<commit_msg>Test ignoring duplicates and order<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
self.addTypeEqualityFunc(str,self.assertSameWords)
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two one")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un")
def assertSameWords(self, tested, reference, msg=None):
set_tested = set(tested.split())
set_reference = set(reference.split())
if set_tested != set_reference:
raise self.failureException(msg if msg is not None else "%r doesn't consist of the same words as %r" % (tested, reference))
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two two one ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan too wan ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un to un un un ")
if __name__ == "__main__":
unittest.main()
Test ignoring duplicates and order#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
self.addTypeEqualityFunc(str,self.assertSameWords)
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two one")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un")
def assertSameWords(self, tested, reference, msg=None):
set_tested = set(tested.split())
set_reference = set(reference.split())
if set_tested != set_reference:
raise self.failureException(msg if msg is not None else "%r doesn't consist of the same words as %r" % (tested, reference))
if __name__ == "__main__":
unittest.main()
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two two one ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan too wan ")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un to un un un ")
if __name__ == "__main__":
unittest.main()
<commit_msg>Test ignoring duplicates and order<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import unittest
import speech_recognition as sr
class TestSpecialFeatures(unittest.TestCase):
def setUp(self):
self.AUDIO_FILE_EN = os.path.join(os.path.dirname(os.path.realpath(__file__)), "english.wav")
self.addTypeEqualityFunc(str,self.assertSameWords)
def test_sphinx_keywords(self):
r = sr.Recognizer()
with sr.AudioFile(self.AUDIO_FILE_EN) as source: audio = r.record(source)
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("one", 1.0), ("two", 1.0), ("three", 1.0)]), "three two one")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("wan", 0.95), ("too", 1.0), ("tree", 1.0)]), "tree too wan")
self.assertEqual(r.recognize_sphinx(audio, keyword_entries=[("un", 0.95), ("to", 1.0), ("tee", 1.0)]), "tee to un")
def assertSameWords(self, tested, reference, msg=None):
set_tested = set(tested.split())
set_reference = set(reference.split())
if set_tested != set_reference:
raise self.failureException(msg if msg is not None else "%r doesn't consist of the same words as %r" % (tested, reference))
if __name__ == "__main__":
unittest.main()
|
6f128279e8f4126c2d0f1a4076b93768678cdc0a
|
zerver/migrations/0130_text_choice_in_emojiset.py
|
zerver/migrations/0130_text_choice_in_emojiset.py
|
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
|
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
def reverse_change_emojiset(apps: StateApps,
schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emojiset="text"):
# Resetting `emojiset` to "google" (the default) doesn't make an
# exact round trip, but it's nearly indistinguishable -- the setting
# shouldn't really matter while `emoji_alt_code` is true.
user.emoji_alt_code = True
user.emojiset = "google"
user.save(update_fields=["emoji_alt_code", "emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset, reverse_change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
|
Add reverser for emoji_alt_code migration.
|
migrations: Add reverser for emoji_alt_code migration.
This is easy to do, and prevents this feature from getting a server
admin stuck in potentially a pretty uncomfortable way -- unable to
roll back a deploy.
|
Python
|
apache-2.0
|
tommyip/zulip,eeshangarg/zulip,rht/zulip,jackrzhang/zulip,brainwane/zulip,andersk/zulip,shubhamdhama/zulip,shubhamdhama/zulip,punchagan/zulip,kou/zulip,hackerkid/zulip,timabbott/zulip,showell/zulip,hackerkid/zulip,eeshangarg/zulip,dhcrzf/zulip,andersk/zulip,shubhamdhama/zulip,punchagan/zulip,kou/zulip,synicalsyntax/zulip,hackerkid/zulip,dhcrzf/zulip,showell/zulip,rishig/zulip,shubhamdhama/zulip,hackerkid/zulip,hackerkid/zulip,rishig/zulip,eeshangarg/zulip,synicalsyntax/zulip,timabbott/zulip,synicalsyntax/zulip,shubhamdhama/zulip,rht/zulip,synicalsyntax/zulip,jackrzhang/zulip,dhcrzf/zulip,kou/zulip,andersk/zulip,andersk/zulip,synicalsyntax/zulip,jackrzhang/zulip,dhcrzf/zulip,brainwane/zulip,jackrzhang/zulip,kou/zulip,punchagan/zulip,zulip/zulip,timabbott/zulip,brainwane/zulip,zulip/zulip,rht/zulip,rht/zulip,tommyip/zulip,tommyip/zulip,tommyip/zulip,jackrzhang/zulip,andersk/zulip,rishig/zulip,synicalsyntax/zulip,tommyip/zulip,zulip/zulip,punchagan/zulip,brainwane/zulip,showell/zulip,eeshangarg/zulip,showell/zulip,rishig/zulip,dhcrzf/zulip,punchagan/zulip,punchagan/zulip,eeshangarg/zulip,showell/zulip,timabbott/zulip,shubhamdhama/zulip,zulip/zulip,showell/zulip,zulip/zulip,brainwane/zulip,eeshangarg/zulip,timabbott/zulip,showell/zulip,jackrzhang/zulip,hackerkid/zulip,rishig/zulip,eeshangarg/zulip,tommyip/zulip,kou/zulip,dhcrzf/zulip,shubhamdhama/zulip,rht/zulip,synicalsyntax/zulip,timabbott/zulip,zulip/zulip,tommyip/zulip,zulip/zulip,kou/zulip,andersk/zulip,hackerkid/zulip,rishig/zulip,timabbott/zulip,punchagan/zulip,rht/zulip,andersk/zulip,rishig/zulip,brainwane/zulip,jackrzhang/zulip,kou/zulip,brainwane/zulip,dhcrzf/zulip,rht/zulip
|
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
migrations: Add reverser for emoji_alt_code migration.
This is easy to do, and prevents this feature from getting a server
admin stuck in potentially a pretty uncomfortable way -- unable to
roll back a deploy.
|
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
def reverse_change_emojiset(apps: StateApps,
schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emojiset="text"):
# Resetting `emojiset` to "google" (the default) doesn't make an
# exact round trip, but it's nearly indistinguishable -- the setting
# shouldn't really matter while `emoji_alt_code` is true.
user.emoji_alt_code = True
user.emojiset = "google"
user.save(update_fields=["emoji_alt_code", "emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset, reverse_change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
|
<commit_before>from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
<commit_msg>migrations: Add reverser for emoji_alt_code migration.
This is easy to do, and prevents this feature from getting a server
admin stuck in potentially a pretty uncomfortable way -- unable to
roll back a deploy.<commit_after>
|
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
def reverse_change_emojiset(apps: StateApps,
schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emojiset="text"):
# Resetting `emojiset` to "google" (the default) doesn't make an
# exact round trip, but it's nearly indistinguishable -- the setting
# shouldn't really matter while `emoji_alt_code` is true.
user.emoji_alt_code = True
user.emojiset = "google"
user.save(update_fields=["emoji_alt_code", "emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset, reverse_change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
|
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
migrations: Add reverser for emoji_alt_code migration.
This is easy to do, and prevents this feature from getting a server
admin stuck in potentially a pretty uncomfortable way -- unable to
roll back a deploy.from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
def reverse_change_emojiset(apps: StateApps,
schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emojiset="text"):
# Resetting `emojiset` to "google" (the default) doesn't make an
# exact round trip, but it's nearly indistinguishable -- the setting
# shouldn't really matter while `emoji_alt_code` is true.
user.emoji_alt_code = True
user.emojiset = "google"
user.save(update_fields=["emoji_alt_code", "emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset, reverse_change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
|
<commit_before>from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
<commit_msg>migrations: Add reverser for emoji_alt_code migration.
This is easy to do, and prevents this feature from getting a server
admin stuck in potentially a pretty uncomfortable way -- unable to
roll back a deploy.<commit_after>from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# change emojiset to text if emoji_alt_code is true.
def change_emojiset(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emoji_alt_code=True):
user.emojiset = "text"
user.save(update_fields=["emojiset"])
def reverse_change_emojiset(apps: StateApps,
schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
for user in UserProfile.objects.filter(emojiset="text"):
# Resetting `emojiset` to "google" (the default) doesn't make an
# exact round trip, but it's nearly indistinguishable -- the setting
# shouldn't really matter while `emoji_alt_code` is true.
user.emoji_alt_code = True
user.emojiset = "google"
user.save(update_fields=["emoji_alt_code", "emojiset"])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0129_remove_userprofile_autoscroll_forever'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='emojiset',
field=models.CharField(choices=[('google', 'Google'), ('apple', 'Apple'), ('twitter', 'Twitter'), ('emojione', 'EmojiOne'), ('text', 'Plain text')], default='google', max_length=20),
),
migrations.RunPython(change_emojiset, reverse_change_emojiset),
migrations.RemoveField(
model_name='userprofile',
name='emoji_alt_code',
),
]
|
c2b950c043f452a473ed5c726faa540c0118b0f5
|
cs251tk/toolkit/process_student.py
|
cs251tk/toolkit/process_student.py
|
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
Remove extra whitespace in newline
|
Remove extra whitespace in newline
|
Python
|
mit
|
StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit
|
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
Remove extra whitespace in newline
|
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
<commit_before>from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
<commit_msg>Remove extra whitespace in newline<commit_after>
|
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
Remove extra whitespace in newlinefrom cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
<commit_before>from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
<commit_msg>Remove extra whitespace in newline<commit_after>from cs251tk.student import remove
from cs251tk.student import clone_student
from cs251tk.student import stash
from cs251tk.student import pull
from cs251tk.student import checkout_date
from cs251tk.student import record
from cs251tk.student import reset
from cs251tk.student import analyze
def process_student(
student,
*,
assignments,
basedir,
clean,
date,
debug,
interact,
no_check,
no_update,
specs,
stogit_url
):
if clean:
remove(student)
clone_student(student, baseurl=stogit_url)
try:
stash(student, no_update=no_update)
pull(student, no_update=no_update)
checkout_date(student, date=date)
recordings = record(student, specs=specs, to_record=assignments, basedir=basedir, debug=debug, interact=interact)
analysis = analyze(student, specs, check_for_branches=not no_check)
if date:
reset(student)
return analysis, recordings
except Exception as err:
if debug:
raise err
return {'username': student, 'error': err}, []
|
d86701d87e40532197d73b826f076ffa7003003e
|
linspace.py
|
linspace.py
|
def linspace(start, stop, num):
return [(stop*i + start*(num-i)) / num
for i in range(num+1)]
|
#!/usr/bin/env python3
import collections
import collections.abc
class linspace(collections.abc.Sequence):
def __init__(self, start, stop, num):
self.start, self.stop, self.num = start, stop, num
def __len__(self):
return self.num
def __getitem__(self, i):
if i >= self.num:
raise IndexError('linspace object index out of range')
return (self.stop*i + self.start*(self.num-i-1))/(self.num-1)
if __name__ == '__main__':
print(list(linspace(1, 2, 5)))
|
Fix off-by-one error, make it lazy
|
Fix off-by-one error, make it lazy
|
Python
|
mit
|
abarnert/linspace
|
def linspace(start, stop, num):
return [(stop*i + start*(num-i)) / num
for i in range(num+1)]
Fix off-by-one error, make it lazy
|
#!/usr/bin/env python3
import collections
import collections.abc
class linspace(collections.abc.Sequence):
def __init__(self, start, stop, num):
self.start, self.stop, self.num = start, stop, num
def __len__(self):
return self.num
def __getitem__(self, i):
if i >= self.num:
raise IndexError('linspace object index out of range')
return (self.stop*i + self.start*(self.num-i-1))/(self.num-1)
if __name__ == '__main__':
print(list(linspace(1, 2, 5)))
|
<commit_before>def linspace(start, stop, num):
return [(stop*i + start*(num-i)) / num
for i in range(num+1)]
<commit_msg>Fix off-by-one error, make it lazy<commit_after>
|
#!/usr/bin/env python3
import collections
import collections.abc
class linspace(collections.abc.Sequence):
def __init__(self, start, stop, num):
self.start, self.stop, self.num = start, stop, num
def __len__(self):
return self.num
def __getitem__(self, i):
if i >= self.num:
raise IndexError('linspace object index out of range')
return (self.stop*i + self.start*(self.num-i-1))/(self.num-1)
if __name__ == '__main__':
print(list(linspace(1, 2, 5)))
|
def linspace(start, stop, num):
return [(stop*i + start*(num-i)) / num
for i in range(num+1)]
Fix off-by-one error, make it lazy#!/usr/bin/env python3
import collections
import collections.abc
class linspace(collections.abc.Sequence):
def __init__(self, start, stop, num):
self.start, self.stop, self.num = start, stop, num
def __len__(self):
return self.num
def __getitem__(self, i):
if i >= self.num:
raise IndexError('linspace object index out of range')
return (self.stop*i + self.start*(self.num-i-1))/(self.num-1)
if __name__ == '__main__':
print(list(linspace(1, 2, 5)))
|
<commit_before>def linspace(start, stop, num):
return [(stop*i + start*(num-i)) / num
for i in range(num+1)]
<commit_msg>Fix off-by-one error, make it lazy<commit_after>#!/usr/bin/env python3
import collections
import collections.abc
class linspace(collections.abc.Sequence):
def __init__(self, start, stop, num):
self.start, self.stop, self.num = start, stop, num
def __len__(self):
return self.num
def __getitem__(self, i):
if i >= self.num:
raise IndexError('linspace object index out of range')
return (self.stop*i + self.start*(self.num-i-1))/(self.num-1)
if __name__ == '__main__':
print(list(linspace(1, 2, 5)))
|
df34d72695b14ca7ee0cb6c8b6855e2263b13b38
|
test_settings.py
|
test_settings.py
|
HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_categories')
if __name__ == "__main__":
run()
|
HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.run('aldryn_categories')
if __name__ == "__main__":
run()
|
Remove cms dependency in test-runner
|
Remove cms dependency in test-runner
|
Python
|
bsd-3-clause
|
aldryn/aldryn-categories,aldryn/aldryn-categories
|
HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_categories')
if __name__ == "__main__":
run()
Remove cms dependency in test-runner
|
HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.run('aldryn_categories')
if __name__ == "__main__":
run()
|
<commit_before>HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_categories')
if __name__ == "__main__":
run()
<commit_msg>Remove cms dependency in test-runner<commit_after>
|
HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.run('aldryn_categories')
if __name__ == "__main__":
run()
|
HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_categories')
if __name__ == "__main__":
run()
Remove cms dependency in test-runnerHELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.run('aldryn_categories')
if __name__ == "__main__":
run()
|
<commit_before>HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.cms('aldryn_categories')
if __name__ == "__main__":
run()
<commit_msg>Remove cms dependency in test-runner<commit_after>HELPER_SETTINGS = {
'SITE_ID': 1,
'TIME_ZONE': 'Europe/Zurich',
'LANGUAGES': (
('en', 'English'),
('de', 'German'),
('fr', 'French'),
),
'INSTALLED_APPS': [
'parler',
'treebeard',
'aldryn_categories',
],
'PARLER_LANGUAGES': {
1: (
{'code': 'de', },
{'code': 'en', },
{'code': 'fr', },
),
'default': {
# Do not remove or change this value or tests may break.
'hide_untranslated': True,
# Do not remove or change this value or tests may break.
'fallback': 'fr',
}
}
}
def run():
from djangocms_helper import runner
runner.run('aldryn_categories')
if __name__ == "__main__":
run()
|
6f6c743a03d8162abca9e5406e5e6c2e51f77052
|
users/views.py
|
users/views.py
|
# -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
# -*- coding: utf-8 -*-
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
Fix redirect after success registration
|
Fix redirect after success registration
|
Python
|
mit
|
sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/Rynda
|
# -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
Fix redirect after success registration
|
# -*- coding: utf-8 -*-
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
<commit_before># -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
<commit_msg>Fix redirect after success registration<commit_after>
|
# -*- coding: utf-8 -*-
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
# -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
Fix redirect after success registration# -*- coding: utf-8 -*-
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
<commit_before># -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
model = User
form_class = SimpleRegistrationForm
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
<commit_msg>Fix redirect after success registration<commit_after># -*- coding: utf-8 -*-
from django.shortcuts import redirect, render_to_response
from django.contrib.auth.models import User
from django.views.generic.detail import DetailView
from core.views import RyndaFormView, RyndaListView
from users.forms import SimpleRegistrationForm
from users.models import Users
class UserDetail(DetailView):
model = User
template_name = 'user_profile.html'
context_object_name = 'u'
class UserList(RyndaListView):
template_name = 'userlist.html'
context_object_name = 'users'
queryset = User.objects.select_related().filter(is_active=True).order_by('date_joined')
paginator_url = '/user/page/'
paginate_by = 10
class CreateUser(RyndaFormView):
template_name = 'registerform_simple.html'
#model = User
form_class = SimpleRegistrationForm
success_url = '/'
def form_valid(self, form):
#print self.request.META['HTTP_HOST']
user = User()
ce = form.cleaned_data
user.email = ce['email']
user.login = ce['email']
user.set_password(ce['password1'])
user.save()
#profile = Users.objects.create(user=user, ipAddr=self.request.META['REMOTE_ADDR'])
#profile.user = user
#profile.email = ce['email']
#profile.ipAddr = vself.request.META['REMOTE_ADDR']
#profile.save()
return redirect(self.success_url)
def create_user(request):
return render_to_response('registerform_simple.html',
{'form': SimpleRegistrationForm(),}
)
|
0e4bcae9b409d18a2b2f818833b0e03762332a80
|
example/example/spiders/custom_kafka_spider.py
|
example/example/spiders/custom_kafka_spider.py
|
# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import KafkaSpider
from example.items import DmozItem
class CustomKafkaSpider(KafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
|
# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import ListeningKafkaSpider
from ..items import DmozItem
class CustomKafkaSpider(ListeningKafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
|
Use the correct Spider superclass in the example
|
Use the correct Spider superclass in the example
|
Python
|
apache-2.0
|
dfdeshom/scrapy-kafka
|
# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import KafkaSpider
from example.items import DmozItem
class CustomKafkaSpider(KafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
Use the correct Spider superclass in the example
|
# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import ListeningKafkaSpider
from ..items import DmozItem
class CustomKafkaSpider(ListeningKafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
|
<commit_before># -*- coding: utf-8 -*-
from scrapy_kafka.spiders import KafkaSpider
from example.items import DmozItem
class CustomKafkaSpider(KafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
<commit_msg>Use the correct Spider superclass in the example<commit_after>
|
# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import ListeningKafkaSpider
from ..items import DmozItem
class CustomKafkaSpider(ListeningKafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
|
# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import KafkaSpider
from example.items import DmozItem
class CustomKafkaSpider(KafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
Use the correct Spider superclass in the example# -*- coding: utf-8 -*-
from scrapy_kafka.spiders import ListeningKafkaSpider
from ..items import DmozItem
class CustomKafkaSpider(ListeningKafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
|
<commit_before># -*- coding: utf-8 -*-
from scrapy_kafka.spiders import KafkaSpider
from example.items import DmozItem
class CustomKafkaSpider(KafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
<commit_msg>Use the correct Spider superclass in the example<commit_after># -*- coding: utf-8 -*-
from scrapy_kafka.spiders import ListeningKafkaSpider
from ..items import DmozItem
class CustomKafkaSpider(ListeningKafkaSpider):
name = "dmoz_kafka"
allowed_domains = ["dmoz.org"]
def parse(self, response):
for sel in response.xpath('//ul/li'):
item = DmozItem()
item['title'] = sel.xpath('a/text()').extract()
item['link'] = sel.xpath('a/@href').extract()
item['desc'] = sel.xpath('text()').extract()
yield item
|
1cf82c6efa0550c5a0ba7160f82f77db6e3358ec
|
panoptes/test/test_mount.py
|
panoptes/test/test_mount.py
|
from panoptes.mount.ioptron import iOptronMount
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(__file__)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes')
_Mounts.append(module)
except ImportError as err:
self.logger.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
|
import os
import importlib
import warnings
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
mount_dir = os.path.dirname(__file__) + '/../mount/'
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(mount_dir)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes.mount')
_Mounts.append(module)
except ImportError as err:
warnings.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
|
Test file loops over all the mounts
|
Test file loops over all the mounts
|
Python
|
mit
|
AstroHuntsman/POCS,AstroHuntsman/POCS,panoptes/POCS,fmin2958/POCS,joshwalawender/POCS,AstroHuntsman/POCS,panoptes/POCS,fmin2958/POCS,Guokr1991/POCS,panoptes/POCS,Guokr1991/POCS,joshwalawender/POCS,Guokr1991/POCS,Guokr1991/POCS,panoptes/POCS,AstroHuntsman/POCS,joshwalawender/POCS,fmin2958/POCS
|
from panoptes.mount.ioptron import iOptronMount
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(__file__)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes')
_Mounts.append(module)
except ImportError as err:
self.logger.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
Test file loops over all the mounts
|
import os
import importlib
import warnings
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
mount_dir = os.path.dirname(__file__) + '/../mount/'
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(mount_dir)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes.mount')
_Mounts.append(module)
except ImportError as err:
warnings.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
|
<commit_before>from panoptes.mount.ioptron import iOptronMount
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(__file__)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes')
_Mounts.append(module)
except ImportError as err:
self.logger.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
<commit_msg>Test file loops over all the mounts<commit_after>
|
import os
import importlib
import warnings
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
mount_dir = os.path.dirname(__file__) + '/../mount/'
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(mount_dir)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes.mount')
_Mounts.append(module)
except ImportError as err:
warnings.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
|
from panoptes.mount.ioptron import iOptronMount
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(__file__)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes')
_Mounts.append(module)
except ImportError as err:
self.logger.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
Test file loops over all the mountsimport os
import importlib
import warnings
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
mount_dir = os.path.dirname(__file__) + '/../mount/'
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(mount_dir)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes.mount')
_Mounts.append(module)
except ImportError as err:
warnings.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
|
<commit_before>from panoptes.mount.ioptron import iOptronMount
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(__file__)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes')
_Mounts.append(module)
except ImportError as err:
self.logger.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
<commit_msg>Test file loops over all the mounts<commit_after>import os
import importlib
import warnings
class TestOptronMount:
mount = None
def setup(self):
print ("TestMount:setup() before each test method")
def teardown(self):
print ("TestMount:teardown() after each test method")
@classmethod
def setup_class(cls):
mount_dir = os.path.dirname(__file__) + '/../mount/'
print ("setup_class() before any methods in this class")
_Mounts = []
for name in os.listdir(os.path.dirname(mount_dir)):
if not name.startswith('_') and name.endswith('.py'):
name = '.' + os.path.splitext(name)[0]
try:
module = importlib.import_module(name,'panoptes.mount')
_Mounts.append(module)
except ImportError as err:
warnings.warn('Failed to load mount plugin: {}'.format(err))
@classmethod
def teardown_class(cls):
print ("teardown_class() after any methods in this class")
def test_is_connected_false(self):
pass
def test_connect(self):
pass
def test_is_connected_true(self):
pass
def test_is_slewing(self):
pass
def test_check_coordinates(self):
pass
def test_sync_coordinates(self):
pass
def test_slew_to_coordinates(self):
pass
def test_slew_to_park(self):
pass
def test_echo(self):
pass
|
9ed49cee1ce669547f6d0278af00c3ad246fec78
|
migrations/versions/201608181200_11890f58b1df_add_tracks.py
|
migrations/versions/201608181200_11890f58b1df_add_tracks.py
|
"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
Fix incorrect indexes in alembic revision
|
Fix incorrect indexes in alembic revision
|
Python
|
mit
|
ThiefMaster/indico,ThiefMaster/indico,mic4ael/indico,pferreir/indico,pferreir/indico,mvidalgarcia/indico,indico/indico,ThiefMaster/indico,mvidalgarcia/indico,indico/indico,OmeGak/indico,mic4ael/indico,OmeGak/indico,mvidalgarcia/indico,DirkHoffmann/indico,mic4ael/indico,DirkHoffmann/indico,mvidalgarcia/indico,OmeGak/indico,DirkHoffmann/indico,pferreir/indico,mic4ael/indico,OmeGak/indico,indico/indico,ThiefMaster/indico,DirkHoffmann/indico,pferreir/indico,indico/indico
|
"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
Fix incorrect indexes in alembic revision
|
"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
<commit_before>"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
<commit_msg>Fix incorrect indexes in alembic revision<commit_after>
|
"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
Fix incorrect indexes in alembic revision"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
<commit_before>"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False, index=True),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
<commit_msg>Fix incorrect indexes in alembic revision<commit_after>"""Add tracks
Revision ID: 11890f58b1df
Revises: 4d4b95748173
Create Date: 2016-08-16 16:48:27.441514
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '11890f58b1df'
down_revision = '4d4b95748173'
def upgrade():
op.create_table(
'tracks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False, index=True),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
sa.PrimaryKeyConstraint('id'),
schema='events'
)
def downgrade():
op.drop_table('tracks', schema='events')
|
9ab879af48e46fae2279402ac9cb242f173f037c
|
javascript_settings/views.py
|
javascript_settings/views.py
|
from django.http import HttpResponse
from django.utils import simplejson
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
|
import json
from django.http import HttpResponse
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
|
Use json instead of django.utils.simplejson.
|
Use json instead of django.utils.simplejson.
|
Python
|
mit
|
pozytywnie/django-javascript-settings
|
from django.http import HttpResponse
from django.utils import simplejson
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
Use json instead of django.utils.simplejson.
|
import json
from django.http import HttpResponse
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
|
<commit_before>from django.http import HttpResponse
from django.utils import simplejson
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
<commit_msg>Use json instead of django.utils.simplejson.<commit_after>
|
import json
from django.http import HttpResponse
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
|
from django.http import HttpResponse
from django.utils import simplejson
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
Use json instead of django.utils.simplejson.import json
from django.http import HttpResponse
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
|
<commit_before>from django.http import HttpResponse
from django.utils import simplejson
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
<commit_msg>Use json instead of django.utils.simplejson.<commit_after>import json
from django.http import HttpResponse
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def load_configuration(request):
return HttpResponse(
"var configuration = %s;" % json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
)
|
d6bfe797254823f0be8b00b657061cbfe7d3da15
|
wafer/utils.py
|
wafer/utils.py
|
import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
connection.queries = []
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
|
import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
del connection.queries[:]
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
|
Clear queries list instead of overwriting it.
|
Clear queries list instead of overwriting it.
|
Python
|
isc
|
CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer
|
import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
connection.queries = []
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
Clear queries list instead of overwriting it.
|
import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
del connection.queries[:]
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
|
<commit_before>import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
connection.queries = []
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
<commit_msg>Clear queries list instead of overwriting it.<commit_after>
|
import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
del connection.queries[:]
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
|
import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
connection.queries = []
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
Clear queries list instead of overwriting it.import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
del connection.queries[:]
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
|
<commit_before>import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
connection.queries = []
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
<commit_msg>Clear queries list instead of overwriting it.<commit_after>import functools
import unicodedata
from django.core.cache import get_cache
from django.conf import settings
def normalize_unicode(u):
"""Replace non-ASCII characters with closest ASCII equivalents
where possible.
"""
return unicodedata.normalize('NFKD', u).encode('ascii', 'ignore')
def cache_result(cache_key, timeout):
"""A decorator for caching the result of a function."""
def decorator(f):
cache_name = settings.WAFER_CACHE
@functools.wraps(f)
def wrapper(*args, **kw):
# replace this with cache.caches when we drop Django 1.6
# compatibility
cache = get_cache(cache_name)
result = cache.get(cache_key)
if result is None:
result = f(*args, **kw)
cache.set(cache_key, result, timeout)
return result
def invalidate():
cache = get_cache(cache_name)
cache.delete(cache_key)
wrapper.invalidate = invalidate
return wrapper
return decorator
class QueryTracker(object):
""" Track queries to database. """
def __enter__(self):
from django.conf import settings
from django.db import connection
self._debug = settings.DEBUG
settings.DEBUG = True
del connection.queries[:]
return self
def __exit__(self, *args, **kw):
from django.conf import settings
settings.DEBUG = self._debug
@property
def queries(self):
from django.db import connection
return connection.queries[:]
|
ad8b8d6db5e81884ff5e3270455c714024cccbc1
|
Tools/scripts/findlinksto.py
|
Tools/scripts/findlinksto.py
|
#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import regex
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.error, 'not enough arguments'
except getopt.error, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = regex.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) >= 0:
print name, '->', linkto
except os.error:
pass
main()
|
#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import re
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.GetoptError('not enough arguments', None)
except getopt.GetoptError, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = re.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) is not None:
print name, '->', linkto
except os.error:
pass
main()
|
Use new name for GetoptError, and pass it two arguments Use re module instead of regex
|
Use new name for GetoptError, and pass it two arguments
Use re module instead of regex
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import regex
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.error, 'not enough arguments'
except getopt.error, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = regex.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) >= 0:
print name, '->', linkto
except os.error:
pass
main()
Use new name for GetoptError, and pass it two arguments
Use re module instead of regex
|
#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import re
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.GetoptError('not enough arguments', None)
except getopt.GetoptError, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = re.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) is not None:
print name, '->', linkto
except os.error:
pass
main()
|
<commit_before>#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import regex
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.error, 'not enough arguments'
except getopt.error, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = regex.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) >= 0:
print name, '->', linkto
except os.error:
pass
main()
<commit_msg>Use new name for GetoptError, and pass it two arguments
Use re module instead of regex<commit_after>
|
#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import re
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.GetoptError('not enough arguments', None)
except getopt.GetoptError, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = re.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) is not None:
print name, '->', linkto
except os.error:
pass
main()
|
#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import regex
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.error, 'not enough arguments'
except getopt.error, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = regex.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) >= 0:
print name, '->', linkto
except os.error:
pass
main()
Use new name for GetoptError, and pass it two arguments
Use re module instead of regex#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import re
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.GetoptError('not enough arguments', None)
except getopt.GetoptError, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = re.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) is not None:
print name, '->', linkto
except os.error:
pass
main()
|
<commit_before>#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import regex
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.error, 'not enough arguments'
except getopt.error, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = regex.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) >= 0:
print name, '->', linkto
except os.error:
pass
main()
<commit_msg>Use new name for GetoptError, and pass it two arguments
Use re module instead of regex<commit_after>#! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import re
import getopt
def main():
try:
opts, args = getopt.getopt(sys.argv[1:], '')
if len(args) < 2:
raise getopt.GetoptError('not enough arguments', None)
except getopt.GetoptError, msg:
sys.stdout = sys.stderr
print msg
print 'usage: findlinksto pattern directory ...'
sys.exit(2)
pat, dirs = args[0], args[1:]
prog = re.compile(pat)
for dirname in dirs:
os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
if os.path.islink(dirname):
names[:] = []
return
if os.path.ismount(dirname):
print 'descend into', dirname
for name in names:
name = os.path.join(dirname, name)
try:
linkto = os.readlink(name)
if prog.search(linkto) is not None:
print name, '->', linkto
except os.error:
pass
main()
|
678594fb68845d3aec80c935fc0cd0fe89ce26b5
|
shakedown/dcos/service.py
|
shakedown/dcos/service.py
|
from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
|
from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
Return None or Empty List
|
Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.
|
Python
|
apache-2.0
|
dcos/shakedown
|
from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.
|
from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
<commit_before>from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
<commit_msg>Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.<commit_after>
|
from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
<commit_before>from dcos import (marathon, mesos, package, util)
from dcos.errors import DCOSException
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return False
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['id']:
return service['id']
return False
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service and service['tasks']:
return service['tasks']
return False
<commit_msg>Return None or Empty List
Return None when an object cannot be found or an empty list when a list
type is expected.<commit_after>from dcos import mesos
def get_service(service_name, inactive=False, completed=False):
services = mesos.get_master().frameworks(inactive=inactive, completed=completed)
for service in services:
if service['name'] == service_name:
return service
return None
def get_service_framework_id(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['id']:
return service['id']
return None
def get_service_tasks(service_name, inactive=False, completed=False):
service = get_service(service_name, inactive, completed)
if service is not None and service['tasks']:
return service['tasks']
return []
|
652853221ce9eca84ebbe568fa0c1985915dad59
|
scratch/asb/print_refquad_input.py
|
scratch/asb/print_refquad_input.py
|
from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "integrated" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_integrated.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
|
from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "indexed" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_indexed.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
|
Use indexed instead of integrated pickle files for detector refinement.
|
Use indexed instead of integrated pickle files for detector refinement.
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "integrated" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_integrated.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
Use indexed instead of integrated pickle files for detector refinement.
|
from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "indexed" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_indexed.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
|
<commit_before>from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "integrated" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_integrated.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
<commit_msg>Use indexed instead of integrated pickle files for detector refinement.<commit_after>
|
from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "indexed" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_indexed.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
|
from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "integrated" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_integrated.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
Use indexed instead of integrated pickle files for detector refinement.from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "indexed" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_indexed.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
|
<commit_before>from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "integrated" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_integrated.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
<commit_msg>Use indexed instead of integrated pickle files for detector refinement.<commit_after>from __future__ import division
import os, sys
paths = sys.argv[1:]
for path in paths:
for filename in os.listdir(path):
if "indexed" in filename:
print "input {"
print " experiments =", os.path.join(path, filename.rstrip("_indexed.pickle") + "_refined_experiments.json")
print " reflections =", os.path.join(path, filename)
print "}"
|
34aa4a19ac1fc7ff52ea0d9ac13df944f1e9754d
|
src/tn/plonebehavior/template/html_page_html.py
|
src/tn/plonebehavior/template/html_page_html.py
|
try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
|
try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import _
from tn.plonebehavior.template import ITemplateConfiguration
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
from z3c.form import validator
import collections
import lxml.cssselect
import lxml.html
import zope.interface
isiterable = lambda o: isinstance(o, collections.Iterable)
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
class CSSSelectorValidator(validator.SimpleFieldValidator):
def validate(self, value):
super(CSSSelectorValidator, self).validate(value)
tree = lxml.html.document_fromstring(self.context.html)
xpath = lxml.cssselect.CSSSelector(value).path
selection = tree.xpath(xpath)
if not isiterable(selection) or len(selection) != 1:
raise zope.interface.Invalid(_(
"Expression doesn't select a single element "
"in the HTML page."
))
validator.WidgetValidatorDiscriminators(
CSSSelectorValidator,
context=html_page.IHTMLPageSchema,
field=ITemplateConfiguration['css']
)
grok.global_adapter(CSSSelectorValidator)
|
Add a validation to ensure that CSS selector actually works
|
Add a validation to ensure that CSS selector actually works
|
Python
|
bsd-3-clause
|
tecnologiaenegocios/tn.plonebehavior.template,tecnologiaenegocios/tn.plonebehavior.template
|
try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
Add a validation to ensure that CSS selector actually works
|
try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import _
from tn.plonebehavior.template import ITemplateConfiguration
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
from z3c.form import validator
import collections
import lxml.cssselect
import lxml.html
import zope.interface
isiterable = lambda o: isinstance(o, collections.Iterable)
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
class CSSSelectorValidator(validator.SimpleFieldValidator):
def validate(self, value):
super(CSSSelectorValidator, self).validate(value)
tree = lxml.html.document_fromstring(self.context.html)
xpath = lxml.cssselect.CSSSelector(value).path
selection = tree.xpath(xpath)
if not isiterable(selection) or len(selection) != 1:
raise zope.interface.Invalid(_(
"Expression doesn't select a single element "
"in the HTML page."
))
validator.WidgetValidatorDiscriminators(
CSSSelectorValidator,
context=html_page.IHTMLPageSchema,
field=ITemplateConfiguration['css']
)
grok.global_adapter(CSSSelectorValidator)
|
<commit_before>try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
<commit_msg>Add a validation to ensure that CSS selector actually works<commit_after>
|
try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import _
from tn.plonebehavior.template import ITemplateConfiguration
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
from z3c.form import validator
import collections
import lxml.cssselect
import lxml.html
import zope.interface
isiterable = lambda o: isinstance(o, collections.Iterable)
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
class CSSSelectorValidator(validator.SimpleFieldValidator):
def validate(self, value):
super(CSSSelectorValidator, self).validate(value)
tree = lxml.html.document_fromstring(self.context.html)
xpath = lxml.cssselect.CSSSelector(value).path
selection = tree.xpath(xpath)
if not isiterable(selection) or len(selection) != 1:
raise zope.interface.Invalid(_(
"Expression doesn't select a single element "
"in the HTML page."
))
validator.WidgetValidatorDiscriminators(
CSSSelectorValidator,
context=html_page.IHTMLPageSchema,
field=ITemplateConfiguration['css']
)
grok.global_adapter(CSSSelectorValidator)
|
try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
Add a validation to ensure that CSS selector actually workstry:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import _
from tn.plonebehavior.template import ITemplateConfiguration
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
from z3c.form import validator
import collections
import lxml.cssselect
import lxml.html
import zope.interface
isiterable = lambda o: isinstance(o, collections.Iterable)
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
class CSSSelectorValidator(validator.SimpleFieldValidator):
def validate(self, value):
super(CSSSelectorValidator, self).validate(value)
tree = lxml.html.document_fromstring(self.context.html)
xpath = lxml.cssselect.CSSSelector(value).path
selection = tree.xpath(xpath)
if not isiterable(selection) or len(selection) != 1:
raise zope.interface.Invalid(_(
"Expression doesn't select a single element "
"in the HTML page."
))
validator.WidgetValidatorDiscriminators(
CSSSelectorValidator,
context=html_page.IHTMLPageSchema,
field=ITemplateConfiguration['css']
)
grok.global_adapter(CSSSelectorValidator)
|
<commit_before>try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
<commit_msg>Add a validation to ensure that CSS selector actually works<commit_after>try:
from tn.plonehtmlpage import html_page
HAS_HTML_PAGE = True
except ImportError:
HAS_HTML_PAGE = False
if HAS_HTML_PAGE:
from five import grok
from tn.plonebehavior.template import _
from tn.plonebehavior.template import ITemplateConfiguration
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template.html import ContextlessHTML
from z3c.form import validator
import collections
import lxml.cssselect
import lxml.html
import zope.interface
isiterable = lambda o: isinstance(o, collections.Iterable)
class HTMLPageHTML(grok.Adapter):
grok.context(html_page.IHTMLPageSchema)
grok.implements(interfaces.IHTML)
contextless_factory = ContextlessHTML
def __unicode__(self):
base_url = self.context.absolute_url()
return unicode(self.contextless_factory(base_url,
self.context.html))
class CSSSelectorValidator(validator.SimpleFieldValidator):
def validate(self, value):
super(CSSSelectorValidator, self).validate(value)
tree = lxml.html.document_fromstring(self.context.html)
xpath = lxml.cssselect.CSSSelector(value).path
selection = tree.xpath(xpath)
if not isiterable(selection) or len(selection) != 1:
raise zope.interface.Invalid(_(
"Expression doesn't select a single element "
"in the HTML page."
))
validator.WidgetValidatorDiscriminators(
CSSSelectorValidator,
context=html_page.IHTMLPageSchema,
field=ITemplateConfiguration['css']
)
grok.global_adapter(CSSSelectorValidator)
|
fa977ddff460dc763448835701b4858934498ffb
|
img_pipe/archival_data/single_channel_clean.py
|
img_pipe/archival_data/single_channel_clean.py
|
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
|
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
|
Add in largest scale with high pb cutoff
|
Add in largest scale with high pb cutoff
|
Python
|
mit
|
e-koch/canfar_scripts,e-koch/canfar_scripts
|
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
Add in largest scale with high pb cutoff
|
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
|
<commit_before>
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
<commit_msg>Add in largest scale with high pb cutoff<commit_after>
|
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
|
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
Add in largest scale with high pb cutoff
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
|
<commit_before>
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
<commit_msg>Add in largest scale with high pb cutoff<commit_after>
import sys
'''
Cleans an MS with a single channel given a mask and a model
'''
vis = sys.argv[4]
model = sys.argv[5]
mask = sys.argv[6]
out_root = vis[:-3]
clean(vis=vis, imagename=out_root+'.clean', field='M33*',
restfreq='1420.40575177MHz',
mode='channel', width=1, nchan=1, start=1,
cell='1.5arcsec', multiscale=[0, 4, 8, 20, 40, 80],
threshold='2.2mJy/beam', imagermode='mosaic', gain=0.5,
imsize=[4096, 4096], weighting='natural', robust=0.0, niter=50000,
pbcor=True, minpb=0.7, interpolation='linear', usescratch=True,
phasecenter='J2000 01h33m50.904 +30d39m35.79', veltype='radio',
outframe='LSRK', modelimage=model, mask=mask)
|
c3c4b52991706036a27eb4cebf33ea8eaad115d2
|
enchanting2.py
|
enchanting2.py
|
"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import pygame
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
pygame.display.flip()
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)
|
"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)
|
Fix - was flipping display twice
|
Fix - was flipping display twice
Gah. Here is a speedup for pygame -- don't flip the display twice.
|
Python
|
agpl-3.0
|
clintonblackmore/enchanting2,clintonblackmore/enchanting2
|
"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import pygame
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
pygame.display.flip()
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)Fix - was flipping display twice
Gah. Here is a speedup for pygame -- don't flip the display twice.
|
"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)
|
<commit_before>"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import pygame
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
pygame.display.flip()
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)<commit_msg>Fix - was flipping display twice
Gah. Here is a speedup for pygame -- don't flip the display twice.<commit_after>
|
"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)
|
"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import pygame
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
pygame.display.flip()
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)Fix - was flipping display twice
Gah. Here is a speedup for pygame -- don't flip the display twice."""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)
|
<commit_before>"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import pygame
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
pygame.display.flip()
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)<commit_msg>Fix - was flipping display twice
Gah. Here is a speedup for pygame -- don't flip the display twice.<commit_after>"""enchanting2.py
This is the main entry point of the system"""
import sys
import xml.etree.cElementTree as ElementTree
import actor
import media
def main(argv):
"""This is a naive, blocking, co-operatively multitasking approach"""
filename = argv[1] # xml file to open
tree = ElementTree.parse(filename)
project = actor.Project()
project.deserialize(tree.getroot())
# Now, we can run the code
all_actors = [project.stage]
all_actors.extend([sprite for sprite in project.stage.sprites
if isinstance(sprite, actor.BaseActor)])
# Create our media environment
# (now that we have dimensions for the screen)
media_environment = media.PyGameMediaEnvironment()
media_environment.setup_for_project(project)
for sprite in all_actors:
sprite.convert_art(media_environment)
while True:
media_environment.check_for_events()
for sprite in all_actors:
for script in sprite.scripts:
try:
script.step(sprite)
except StopIteration:
pass
# note: the stage is the first sprite in the list, and erases the screen
for sprite in all_actors:
sprite.draw(media_environment)
media_environment.finished_frame()
if __name__ == "__main__":
main(sys.argv)
|
d1d40da564ca82dc58a37893f86acc934bc69cd5
|
api/base/content_negotiation.py
|
api/base/content_negotiation.py
|
from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return DefaultContentNegotiation.select_renderer(self, request, renderers)
|
from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return super(JSONAPIContentNegotiation, self).select_renderer(request, renderers)
|
Use super because only one superclass
|
Use super because only one superclass
|
Python
|
apache-2.0
|
SSJohns/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,chennan47/osf.io,chennan47/osf.io,KAsante95/osf.io,laurenrevere/osf.io,njantrania/osf.io,caneruguz/osf.io,adlius/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,chrisseto/osf.io,zamattiac/osf.io,erinspace/osf.io,haoyuchen1992/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,brandonPurvis/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,mluo613/osf.io,jmcarp/osf.io,felliott/osf.io,pattisdr/osf.io,chrisseto/osf.io,mfraezz/osf.io,chennan47/osf.io,emetsger/osf.io,doublebits/osf.io,doublebits/osf.io,hmoco/osf.io,erinspace/osf.io,adlius/osf.io,acshi/osf.io,kch8qx/osf.io,wearpants/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,abought/osf.io,CenterForOpenScience/osf.io,MerlinZhang/osf.io,amyshi188/osf.io,danielneis/osf.io,leb2dg/osf.io,petermalcolm/osf.io,jnayak1/osf.io,rdhyee/osf.io,caseyrygt/osf.io,kwierman/osf.io,Johnetordoff/osf.io,mattclark/osf.io,samanehsan/osf.io,binoculars/osf.io,acshi/osf.io,KAsante95/osf.io,petermalcolm/osf.io,jmcarp/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,danielneis/osf.io,SSJohns/osf.io,emetsger/osf.io,GageGaskins/osf.io,caneruguz/osf.io,KAsante95/osf.io,sbt9uc/osf.io,laurenrevere/osf.io,wearpants/osf.io,petermalcolm/osf.io,hmoco/osf.io,billyhunt/osf.io,njantrania/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,cslzchen/osf.io,ckc6cz/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,jnayak1/osf.io,Ghalko/osf.io,adlius/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,caneruguz/osf.io,ticklemepierce/osf.io,felliott/osf.io,adlius/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,icereval/osf.io,caneruguz/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,emetsger/osf.io,icereval/osf.io,mattclark/osf.io,mluo613/osf.io,MerlinZhang/osf.io,aaxelb/osf.io,DanielSBrown/osf.io,caseyrygt/osf.io,kch8qx/osf.io,alexschiller/osf.io,sloria/osf.io,binoculars/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,abought/osf.io,billyhunt/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.i
o,zachjanicki/osf.io,abought/osf.io,Nesiehr/osf.io,MerlinZhang/osf.io,crcresearch/osf.io,rdhyee/osf.io,petermalcolm/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,cosenal/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,arpitar/osf.io,cwisecarver/osf.io,ckc6cz/osf.io,jnayak1/osf.io,cosenal/osf.io,ticklemepierce/osf.io,monikagrabowska/osf.io,GageGaskins/osf.io,aaxelb/osf.io,chrisseto/osf.io,saradbowman/osf.io,billyhunt/osf.io,jmcarp/osf.io,acshi/osf.io,hmoco/osf.io,danielneis/osf.io,wearpants/osf.io,cwisecarver/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,mluke93/osf.io,samanehsan/osf.io,mluke93/osf.io,arpitar/osf.io,CenterForOpenScience/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,njantrania/osf.io,zachjanicki/osf.io,mluo613/osf.io,Ghalko/osf.io,emetsger/osf.io,hmoco/osf.io,mattclark/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,aaxelb/osf.io,caseyrollins/osf.io,haoyuchen1992/osf.io,rdhyee/osf.io,ckc6cz/osf.io,cslzchen/osf.io,ckc6cz/osf.io,cwisecarver/osf.io,billyhunt/osf.io,baylee-d/osf.io,mluo613/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,jmcarp/osf.io,sloria/osf.io,cslzchen/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,cosenal/osf.io,amyshi188/osf.io,kwierman/osf.io,samchrisinger/osf.io,Ghalko/osf.io,arpitar/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,leb2dg/osf.io,mluo613/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,mfraezz/osf.io,Nesiehr/osf.io,jnayak1/osf.io,abought/osf.io,Ghalko/osf.io,leb2dg/osf.io,Nesiehr/osf.io,saradbowman/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,acshi/osf.io,samanehsan/osf.io,amyshi188/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,haoyuchen1992/osf.io,doublebits/osf.io,caseyrollins/osf.io,mluke93/osf.io,ZobairAlijan/osf.io,zachjanicki/osf.io,cslzchen/osf.io,zamattiac/osf.io,caseyrygt/osf.io,TomBaxter/osf.io,kwierman/osf.io,CenterForOpenScience/osf.io,RomanZWang/
osf.io,doublebits/osf.io,HalcyonChimera/osf.io,cosenal/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,SSJohns/osf.io,danielneis/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,sbt9uc/osf.io,felliott/osf.io,kwierman/osf.io,sbt9uc/osf.io,binoculars/osf.io,leb2dg/osf.io,amyshi188/osf.io,pattisdr/osf.io,sbt9uc/osf.io,zachjanicki/osf.io,mluke93/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,ZobairAlijan/osf.io,felliott/osf.io,njantrania/osf.io,chrisseto/osf.io,baylee-d/osf.io,billyhunt/osf.io,erinspace/osf.io,sloria/osf.io,acshi/osf.io,KAsante95/osf.io,TomBaxter/osf.io,icereval/osf.io,kch8qx/osf.io,crcresearch/osf.io,TomBaxter/osf.io,arpitar/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,kch8qx/osf.io
|
from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return DefaultContentNegotiation.select_renderer(self, request, renderers)
Use super because only one superclass
|
from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return super(JSONAPIContentNegotiation, self).select_renderer(request, renderers)
|
<commit_before>from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return DefaultContentNegotiation.select_renderer(self, request, renderers)
<commit_msg>Use super because only one superclass<commit_after>
|
from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return super(JSONAPIContentNegotiation, self).select_renderer(request, renderers)
|
from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return DefaultContentNegotiation.select_renderer(self, request, renderers)
Use super because only one superclassfrom rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return super(JSONAPIContentNegotiation, self).select_renderer(request, renderers)
|
<commit_before>from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return DefaultContentNegotiation.select_renderer(self, request, renderers)
<commit_msg>Use super because only one superclass<commit_after>from rest_framework.negotiation import DefaultContentNegotiation
class JSONAPIContentNegotiation(DefaultContentNegotiation):
def select_renderer(self, request, renderers, format_suffix=None):
"""
If 'application/json' in acceptable media types, use the first renderer in
DEFAULT_RENDERER_CLASSES which should be 'api.base.renderers.JSONAPIRenderer'.
Media_type "application/vnd.api+json". Otherwise, use default select_renderer.
Returns a tuple (renderer, media_type).
"""
accepts = self.get_accept_list(request)
if 'application/json' in accepts:
return (renderers[0], renderers[0].media_type)
return super(JSONAPIContentNegotiation, self).select_renderer(request, renderers)
|
10313adc8b5aab9bcc7e21242ef54effc2262a24
|
accio/__init__.py
|
accio/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'msgpack',
'result_serializer': 'msgpack',
'accept_content': ['msgpack']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'json',
'result_serializer': 'json',
'accept_content': ['json']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
|
Revert msgpack to json content message
|
Revert msgpack to json content message
|
Python
|
bsd-3-clause
|
silverfix/django-accio
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'msgpack',
'result_serializer': 'msgpack',
'accept_content': ['msgpack']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
Revert msgpack to json content message
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'json',
'result_serializer': 'json',
'accept_content': ['json']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'msgpack',
'result_serializer': 'msgpack',
'accept_content': ['msgpack']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
<commit_msg>Revert msgpack to json content message<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'json',
'result_serializer': 'json',
'accept_content': ['json']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'msgpack',
'result_serializer': 'msgpack',
'accept_content': ['msgpack']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
Revert msgpack to json content message# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'json',
'result_serializer': 'json',
'accept_content': ['json']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'msgpack',
'result_serializer': 'msgpack',
'accept_content': ['msgpack']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
<commit_msg>Revert msgpack to json content message<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import timedelta
from celery import Celery
from accio.basetask import ManagedTask
import django.conf
default_app_config = 'accio.apps.Config'
REDIS_DB_URL = 'redis://127.0.0.1:6379/0'
celery_app = Celery(task_cls=ManagedTask)
celery_app.conf.update({
# accio settings
'ACCIO_CELERY_ENABLED': True,
'ACCIO_ATOMIC': True,
'ACCIO_LOGVAULT_URL': REDIS_DB_URL,
'ACCIO_JOBS_MAX_COUNT': 1000,
# celery settings
'broker_url': REDIS_DB_URL,
'result_backend': REDIS_DB_URL,
'result_expires': int(timedelta(hours=1).total_seconds()),
'worker_redirect_stdouts_level': 'INFO',
'worker_concurrency': 4,
'task_serializer': 'json',
'result_serializer': 'json',
'accept_content': ['json']
})
celery_app.conf.update(**vars(django.conf.settings._wrapped))
celery_app.autodiscover_tasks()
|
d423668902a87c17e73f3521e58571709c9b9283
|
td_biblio/urls.py
|
td_biblio/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
Switch to django new url schema
|
Switch to django new url schema
|
Python
|
mit
|
TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio
|
# -*- coding: utf-8 -*-
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
Switch to django new url schema
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
<commit_msg>Switch to django new url schema<commit_after>
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
# -*- coding: utf-8 -*-
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
Switch to django new url schema# -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls import url, patterns
from .views import EntryListView
urlpatterns = patterns(
'',
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
)
<commit_msg>Switch to django new url schema<commit_after># -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import EntryListView
urlpatterns = [
# Entry List
url('^$', EntryListView.as_view(), name='entry_list'),
]
|
5f945f5335cd5d989401fe99b0752e98595748c0
|
chainer/functions/evaluation/binary_accuracy.py
|
chainer/functions/evaluation/binary_accuracy.py
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
Use maximum instead of if-statement
|
Use maximum instead of if-statement
|
Python
|
mit
|
cupy/cupy,keisuke-umezawa/chainer,benob/chainer,ktnyt/chainer,anaruse/chainer,AlpacaDB/chainer,ktnyt/chainer,rezoo/chainer,niboshi/chainer,ysekky/chainer,jnishi/chainer,keisuke-umezawa/chainer,jnishi/chainer,niboshi/chainer,hvy/chainer,chainer/chainer,okuta/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ktnyt/chainer,niboshi/chainer,pfnet/chainer,wkentaro/chainer,aonotas/chainer,cupy/cupy,wkentaro/chainer,kashif/chainer,tkerola/chainer,delta2323/chainer,benob/chainer,AlpacaDB/chainer,hvy/chainer,hvy/chainer,kikusu/chainer,niboshi/chainer,kikusu/chainer,okuta/chainer,jnishi/chainer,cupy/cupy,chainer/chainer,wkentaro/chainer,kiyukuta/chainer,ktnyt/chainer,hvy/chainer,chainer/chainer,jnishi/chainer,cupy/cupy,chainer/chainer,okuta/chainer,ronekko/chainer,cemoody/chainer,keisuke-umezawa/chainer
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
Use maximum instead of if-statement
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
<commit_before>import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
<commit_msg>Use maximum instead of if-statement<commit_after>
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
Use maximum instead of if-statementimport numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
<commit_before>import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = (t != self.ignore_label).sum()
if int(count) == 0:
count = 1
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
<commit_msg>Use maximum instead of if-statement<commit_after>import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class BinaryAccuracy(function.Function):
ignore_label = -1
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
t_type.dtype == numpy.int32,
t_type.shape == x_type.shape,
)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
# flatten
y = y.ravel()
t = t.ravel()
c = (y >= 0)
count = xp.maximum(1, (t != self.ignore_label).sum())
return xp.asarray((c == t).sum(dtype='f') / count, dtype='f'),
def binary_accuracy(y, t):
"""Computes binary classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose i-th element
indicates the score of positive at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
If ``t[i] == -1``, correspondig ``x[i]`` is ignored.
Accuracy is zero if all groundtruth labels are ``-1``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return BinaryAccuracy()(y, t)
|
d6e9b587644d6b6a8c0d1e11407811c9a0576461
|
emailfwd/management/commands/export_emailfwd.py
|
emailfwd/management/commands/export_emailfwd.py
|
from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir('data'):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
|
from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir(output_dir):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
|
Use output_dir, not hard-coded dir name
|
Use output_dir, not hard-coded dir name
|
Python
|
apache-2.0
|
snswa/swsites,snswa/swsites,snswa/swsites
|
from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir('data'):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
Use output_dir, not hard-coded dir name
|
from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir(output_dir):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
|
<commit_before>from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir('data'):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
<commit_msg>Use output_dir, not hard-coded dir name<commit_after>
|
from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir(output_dir):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
|
from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir('data'):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
Use output_dir, not hard-coded dir namefrom __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir(output_dir):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
|
<commit_before>from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir('data'):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
<commit_msg>Use output_dir, not hard-coded dir name<commit_after>from __future__ import print_function
import os
import sys
from django.core.management.base import BaseCommand
from emailfwd.models import ForwardedEmailAddress
class Command(BaseCommand):
args = '<output_dir>'
help = 'Export the email forwarding data directory'
def handle(self, *args, **options):
output_dir = args[0]
if not os.path.isdir(output_dir) or os.listdir(output_dir):
print('Provide an empty directory that exists', file=sys.stderr)
return 1
for fwd in ForwardedEmailAddress.objects.all():
outname = os.path.join(output_dir, '{0.name}@{0.domain}'.format(fwd))
with open(outname, 'wb') as out:
for dest in fwd.emaildestination_set.all():
print(dest.email, file=out)
|
84b48b9be466ac72bddf5ee6288ff48be26eed62
|
tests/classifier/RandomForestClassifier/RandomForestClassifierPHPTest.py
|
tests/classifier/RandomForestClassifier/RandomForestClassifierPHPTest.py
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=100, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
@unittest.skip('The generated code would be too large.')
def test_existing_features_w_digits_data(self):
pass
@unittest.skip('The generated code would be too large.')
def test_random_features_w_digits_data(self):
pass
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=20, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
|
Reduce the number of trees
|
Reduce the number of trees
|
Python
|
bsd-3-clause
|
nok/sklearn-porter
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=100, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
@unittest.skip('The generated code would be too large.')
def test_existing_features_w_digits_data(self):
pass
@unittest.skip('The generated code would be too large.')
def test_random_features_w_digits_data(self):
pass
Reduce the number of trees
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=20, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
|
<commit_before># -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=100, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
@unittest.skip('The generated code would be too large.')
def test_existing_features_w_digits_data(self):
pass
@unittest.skip('The generated code would be too large.')
def test_random_features_w_digits_data(self):
pass
<commit_msg>Reduce the number of trees<commit_after>
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=20, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=100, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
@unittest.skip('The generated code would be too large.')
def test_existing_features_w_digits_data(self):
pass
@unittest.skip('The generated code would be too large.')
def test_random_features_w_digits_data(self):
pass
Reduce the number of trees# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=20, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
|
<commit_before># -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=100, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
@unittest.skip('The generated code would be too large.')
def test_existing_features_w_digits_data(self):
pass
@unittest.skip('The generated code would be too large.')
def test_random_features_w_digits_data(self):
pass
<commit_msg>Reduce the number of trees<commit_after># -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.ensemble import RandomForestClassifier
from ..Classifier import Classifier
from ...language.PHP import PHP
class RandomForestClassifierPHPTest(PHP, Classifier, TestCase):
def setUp(self):
super(RandomForestClassifierPHPTest, self).setUp()
self.mdl = RandomForestClassifier(n_estimators=20, random_state=0)
def tearDown(self):
super(RandomForestClassifierPHPTest, self).tearDown()
|
94c48d9f61b8f7e462ce5f7013b29ce2399e4190
|
log4django/views/__init__.py
|
log4django/views/__init__.py
|
from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs
|
from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(request_id__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs
|
Add search by request_id field.
|
Add search by request_id field.
|
Python
|
bsd-3-clause
|
CodeScaleInc/log4django,CodeScaleInc/log4django,CodeScaleInc/log4django
|
from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qsAdd search by request_id field.
|
from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(request_id__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs
|
<commit_before>from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs<commit_msg>Add search by request_id field.<commit_after>
|
from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(request_id__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs
|
from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qsAdd search by request_id field.from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(request_id__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs
|
<commit_before>from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs<commit_msg>Add search by request_id field.<commit_after>from django.db.models import Q
from ..models import LogRecord
def _filter_records(request):
getvars = request.GET
logrecord_qs = LogRecord.objects.all().select_related('app')
# Filtering by get params.
if getvars.get('q'):
q = getvars.get('q')
logrecord_qs = logrecord_qs.filter(
Q(app__name__icontains=q)
| Q(message__icontains=q)
| Q(fileName__icontains=q)
| Q(loggerName__icontains=q)
| Q(exception_message__icontains=q)
| Q(request_id__icontains=q)
| Q(_extra__icontains=q)
)
if getvars.get('app'):
logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))
if getvars.get('logger'):
logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))
if getvars.getlist('level'):
logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))
if getvars.get('from'):
logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))
if getvars.get('to'):
logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))
return logrecord_qs
|
44233af1e6cdc368a866c4a96ee4b1dfa53cc870
|
logya/generate.py
|
logya/generate.py
|
# -*- coding: utf-8 -*-
import shutil
from pathlib import Path
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
#L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = Path(L.paths.public, url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
|
# -*- coding: utf-8 -*-
import shutil
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = L.paths.public.joinpath(url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
|
Call build_index and use joinpath.
|
Call build_index and use joinpath.
|
Python
|
mit
|
elaOnMars/logya,elaOnMars/logya,yaph/logya,elaOnMars/logya,yaph/logya
|
# -*- coding: utf-8 -*-
import shutil
from pathlib import Path
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
#L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = Path(L.paths.public, url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
Call build_index and use joinpath.
|
# -*- coding: utf-8 -*-
import shutil
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = L.paths.public.joinpath(url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
|
<commit_before># -*- coding: utf-8 -*-
import shutil
from pathlib import Path
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
#L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = Path(L.paths.public, url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
<commit_msg>Call build_index and use joinpath.<commit_after>
|
# -*- coding: utf-8 -*-
import shutil
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = L.paths.public.joinpath(url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
|
# -*- coding: utf-8 -*-
import shutil
from pathlib import Path
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
#L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = Path(L.paths.public, url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
Call build_index and use joinpath.# -*- coding: utf-8 -*-
import shutil
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = L.paths.public.joinpath(url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
|
<commit_before># -*- coding: utf-8 -*-
import shutil
from pathlib import Path
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
#L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = Path(L.paths.public, url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
<commit_msg>Call build_index and use joinpath.<commit_after># -*- coding: utf-8 -*-
import shutil
from shutil import copytree
from logya.core import Logya
from logya.content import write_collection, write_page
def generate(options):
L = Logya(options)
L.build_index()
if not options.keep:
print('Remove existing public directory.')
shutil.rmtree(L.paths.public, ignore_errors=True)
print(f'Generate site in directory: {L.paths.public.as_posix()}')
if L.paths.static.exists():
print('Copy static files.')
copytree(L.paths.static, L.paths.public, dirs_exist_ok=True) # dirs_exist_ok requires Python 3.8
print('Write documents.')
for url, content in L.index.items():
L.paths.dst = L.paths.public.joinpath(url.lstrip('/'), 'index.html')
if 'doc' in content:
print(f'Write document page to: {L.paths.dst}')
write_page(L.paths.dst, content, L.settings)
elif 'docs' in content:
print(f'Write collection page to: {L.paths.dst}')
write_collection(L.paths.dst, content, L.settings)
|
b5bf31eab3fef21872ce44ada1a14aee9c3216d7
|
mlab-ns-simulator/mlabsim/tests/test_update.py
|
mlab-ns-simulator/mlabsim/tests/test_update.py
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
Update test_render_PUT_valid_parameters to be an approximate first draft.
|
Update test_render_PUT_valid_parameters to be an approximate first draft.
|
Python
|
apache-2.0
|
hellais/ooni-support,m-lab/ooni-support,m-lab/ooni-support,hellais/ooni-support
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
Update test_render_PUT_valid_parameters to be an approximate first draft.
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
<commit_before>import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
<commit_msg>Update test_render_PUT_valid_parameters to be an approximate first draft.<commit_after>
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
Update test_render_PUT_valid_parameters to be an approximate first draft.import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
<commit_before>import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
<commit_msg>Update test_render_PUT_valid_parameters to be an approximate first draft.<commit_after>import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
021225cbce30b70c350133f5ae3cae9409bdd6ae
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
Add filters to analyzing admin
|
Add filters to analyzing admin
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
Add filters to analyzing admin
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
<commit_msg>Add filters to analyzing admin<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
Add filters to analyzing admin# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
<commit_msg>Add filters to analyzing admin<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
0af64c176d162f01e78814cae7b98935c01eee82
|
neuroimaging/algorithms/statistics/__init__.py
|
neuroimaging/algorithms/statistics/__init__.py
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
Fix test funcs in algorithms packaging.
|
Fix test funcs in algorithms packaging.
|
Python
|
bsd-3-clause
|
matthew-brett/draft-statsmodels,matthew-brett/draft-statsmodels
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
Fix test funcs in algorithms packaging.
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
<commit_msg>Fix test funcs in algorithms packaging.<commit_after>
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
Fix test funcs in algorithms packaging."""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
<commit_msg>Fix test funcs in algorithms packaging.<commit_after>"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
3773132aa24f1b7f9e3eb104274b0960eee12d14
|
froide/foirequest/templatetags/foirequest_tags.py
|
froide/foirequest/templatetags/foirequest_tags.py
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
description = description.replace("\r\n", "\n")
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
|
Replace uni linebreaks with simple linefeeds in order to make highlighting work
|
Replace uni linebreaks with simple linefeeds in order to make highlighting work
|
Python
|
mit
|
catcosmo/froide,LilithWittmann/froide,stefanw/froide,stefanw/froide,CodeforHawaii/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,fin/froide,CodeforHawaii/froide,catcosmo/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,okfse/froide,catcosmo/froide,fin/froide,CodeforHawaii/froide,ryankanno/froide,ryankanno/froide,ryankanno/froide,LilithWittmann/froide,CodeforHawaii/froide,CodeforHawaii/froide,okfse/froide,okfse/froide,LilithWittmann/froide,catcosmo/froide,okfse/froide,fin/froide,ryankanno/froide,ryankanno/froide,LilithWittmann/froide
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
Replace uni linebreaks with simple linefeeds in order to make highlighting work
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
description = description.replace("\r\n", "\n")
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
<commit_msg>Replace uni linebreaks with simple linefeeds in order to make highlighting work<commit_after>
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
description = description.replace("\r\n", "\n")
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
Replace uni linebreaks with simple linefeeds in order to make highlighting workfrom django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
description = description.replace("\r\n", "\n")
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
<commit_msg>Replace uni linebreaks with simple linefeeds in order to make highlighting work<commit_after>from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
register = template.Library()
def highlight_request(message):
content = message.get_content()
description = message.request.description
description = description.replace("\r\n", "\n")
try:
index = content.index(description)
except ValueError:
return content
offset = index + len(description)
return mark_safe('%s<div class="highlight">%s</div>%s' % (escape(content[:index]),
escape(description), escape(content[offset:])))
register.simple_tag(highlight_request)
|
4fec805a0a6c04ac16fd4439298a4fa05709c7ea
|
armstrong/hatband/tests/hatband_support/admin.py
|
armstrong/hatband/tests/hatband_support/admin.py
|
from armstrong import hatband
from hatband_support import models
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabbedInline(hatband.TabbedInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabbedInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
|
from armstrong import hatband
from . import models
from django.db.models import TextField
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabularInline(hatband.TabularInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabularInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
|
Fix these class names and imports so it works
|
Fix these class names and imports so it works
|
Python
|
apache-2.0
|
armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband
|
from armstrong import hatband
from hatband_support import models
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabbedInline(hatband.TabbedInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabbedInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
Fix these class names and imports so it works
|
from armstrong import hatband
from . import models
from django.db.models import TextField
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabularInline(hatband.TabularInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabularInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
|
<commit_before>from armstrong import hatband
from hatband_support import models
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabbedInline(hatband.TabbedInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabbedInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
<commit_msg>Fix these class names and imports so it works<commit_after>
|
from armstrong import hatband
from . import models
from django.db.models import TextField
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabularInline(hatband.TabularInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabularInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
|
from armstrong import hatband
from hatband_support import models
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabbedInline(hatband.TabbedInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabbedInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
Fix these class names and imports so it worksfrom armstrong import hatband
from . import models
from django.db.models import TextField
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabularInline(hatband.TabularInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabularInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
|
<commit_before>from armstrong import hatband
from hatband_support import models
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
models.TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabbedInline(hatband.TabbedInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabbedInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
<commit_msg>Fix these class names and imports so it works<commit_after>from armstrong import hatband
from . import models
from django.db.models import TextField
from django.forms.widgets import TextInput
class ArticleAdmin(hatband.ModelAdmin):
class Meta:
model = models.TestArticle
class ArticleOverrideAdmin(hatband.ModelAdmin):
formfield_overrides = {
TextField: {'widget': TextInput},
}
class Meta:
model = models.TestArticle
class ArticleTabularInline(hatband.TabularInline):
class Meta:
model = models.TestArticle
class ArticleStackedInline(hatband.StackedInline):
class Meta:
model = models.TestArticle
class CategoryAdminTabbed(hatband.ModelAdmin):
inlines = ArticleTabularInline
class Meta:
model = models.TestCategory
class CategoryAdminStacked(hatband.ModelAdmin):
inlines = ArticleStackedInline
class Meta:
model = models.TestCategory
|
3595bffb71f415999847f323af36737a41ce4b56
|
main.py
|
main.py
|
from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/update", methods=["POST"])
def update():
print json.dumps(request.form['payload'])
return
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
|
from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/latest")
def latest():
return lastCommit
@app.route("/update", methods=["POST"])
def update():
payload = json.dumps(request.form['payload'])
lastCommit = payload['commits'][0]['message']
return ""
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
|
Add code to show last commit message
|
Add code to show last commit message
|
Python
|
mit
|
LinuxMercedes/IntegralGit,LinuxMercedes/IntegralGit
|
from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/update", methods=["POST"])
def update():
print json.dumps(request.form['payload'])
return
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
Add code to show last commit message
|
from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/latest")
def latest():
return lastCommit
@app.route("/update", methods=["POST"])
def update():
payload = json.dumps(request.form['payload'])
lastCommit = payload['commits'][0]['message']
return ""
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
|
<commit_before>from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/update", methods=["POST"])
def update():
print json.dumps(request.form['payload'])
return
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
<commit_msg>Add code to show last commit message<commit_after>
|
from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/latest")
def latest():
return lastCommit
@app.route("/update", methods=["POST"])
def update():
payload = json.dumps(request.form['payload'])
lastCommit = payload['commits'][0]['message']
return ""
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
|
from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/update", methods=["POST"])
def update():
print json.dumps(request.form['payload'])
return
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
Add code to show last commit messagefrom flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/latest")
def latest():
return lastCommit
@app.route("/update", methods=["POST"])
def update():
payload = json.dumps(request.form['payload'])
lastCommit = payload['commits'][0]['message']
return ""
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
|
<commit_before>from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/update", methods=["POST"])
def update():
print json.dumps(request.form['payload'])
return
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
<commit_msg>Add code to show last commit message<commit_after>from flask import Flask, request
from pprint import pprint
import json
app = Flask(__name__)
lastCommit = "No recorded commits!"
@app.route("/")
def hello():
return "IntegralGit: continuous integration via GitHub"
@app.route("/latest")
def latest():
return lastCommit
@app.route("/update", methods=["POST"])
def update():
payload = json.dumps(request.form['payload'])
lastCommit = payload['commits'][0]['message']
return ""
if __name__=="__main__":
app.run(host="0.0.0.0", debug=True)
|
774a392695ebdca04ddc83f351a656e2abe19471
|
messagehandler.py
|
messagehandler.py
|
"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return -1
elif answer == 'y':
return 0
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
|
"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return False
elif answer == 'y':
return True
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
|
Make function return false or true.
|
Make function return false or true.
|
Python
|
bsd-3-clause
|
rockwolf/python_generic
|
"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return -1
elif answer == 'y':
return 0
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
Make function return false or true.
|
"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return False
elif answer == 'y':
return True
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
|
<commit_before>"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return -1
elif answer == 'y':
return 0
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
<commit_msg>Make function return false or true.<commit_after>
|
"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return False
elif answer == 'y':
return True
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
|
"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return -1
elif answer == 'y':
return 0
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
Make function return false or true."""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return False
elif answer == 'y':
return True
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
|
<commit_before>"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return -1
elif answer == 'y':
return 0
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
<commit_msg>Make function return false or true.<commit_after>"""
This file is part of Lisa.
Lisa is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lisa is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lisa. If not, see <http://www.gnu.org/licenses/>.
"""
class MessageHandler:
""" A general class to handle messages """
def __init__(self):
""" Init of MessageHandler class """
def confirmation(self, stract):
""" Show confirmation dialog """
answer = \
raw_input(
'Are you sure you want to %s? [y|n] ' % stract \
).strip().lower()
if answer != 'y':
print('Aborted.')
return False
elif answer == 'y':
return True
def print_action(self, stract, dictstr):
""" Print message about straction for each item in the lstObj list """
for s in dictstr.keys():
print('{0} {1}.'.format(stract, s))
class ErrorHandler(MessageHandler):
""" A class to handle error messages, it inherits from MessageHandler """
def __init__(self):
""" Init of ErrorHandler class """
print('ErrorHandling loaded...')
|
d95f2059a753855d373332df0b748d52bba0210d
|
main.py
|
main.py
|
import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
else:
click.echo("Bye!")
if __name__ == '__main__':
main()
|
import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
click.echo("Bye!")
if __name__ == '__main__':
main()
|
Fix to ensure program echos "bye" whenever program is quit
|
Fix to ensure program echos "bye" whenever program is quit
|
Python
|
mit
|
amrishparmar/mal_cl_interface
|
import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
else:
click.echo("Bye!")
if __name__ == '__main__':
main()
Fix to ensure program echos "bye" whenever program is quit
|
import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
click.echo("Bye!")
if __name__ == '__main__':
main()
|
<commit_before>import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
else:
click.echo("Bye!")
if __name__ == '__main__':
main()
<commit_msg>Fix to ensure program echos "bye" whenever program is quit<commit_after>
|
import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
click.echo("Bye!")
if __name__ == '__main__':
main()
|
import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
else:
click.echo("Bye!")
if __name__ == '__main__':
main()
Fix to ensure program echos "bye" whenever program is quitimport menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
click.echo("Bye!")
if __name__ == '__main__':
main()
|
<commit_before>import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
else:
click.echo("Bye!")
if __name__ == '__main__':
main()
<commit_msg>Fix to ensure program echos "bye" whenever program is quit<commit_after>import menus
import auth
import click
def main():
"""Main function"""
credentials = auth.authenticate_user()
if credentials:
menus.main_menu(credentials)
click.echo("Bye!")
if __name__ == '__main__':
main()
|
5863f46280697be7e14ae9a8e6bb08a42ff940ac
|
resource_scheduler/views.py
|
resource_scheduler/views.py
|
from django.http import HttpResponse
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
return render_to_response("resource.html", mdict)
|
from django.http import Http404
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
try:
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
except Resource.DoesNotExist:
raise Http404("No resources with that primary key were found.")
return render_to_response("resource.html", mdict)
|
Add check to make sure resource exists
|
Add check to make sure resource exists
|
Python
|
mit
|
simon-andrews/django-resource-scheduler,simon-andrews/django-resource-scheduler
|
from django.http import HttpResponse
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
return render_to_response("resource.html", mdict)
Add check to make sure resource exists
|
from django.http import Http404
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
try:
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
except Resource.DoesNotExist:
raise Http404("No resources with that primary key were found.")
return render_to_response("resource.html", mdict)
|
<commit_before>from django.http import HttpResponse
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
return render_to_response("resource.html", mdict)
<commit_msg>Add check to make sure resource exists<commit_after>
|
from django.http import Http404
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
try:
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
except Resource.DoesNotExist:
raise Http404("No resources with that primary key were found.")
return render_to_response("resource.html", mdict)
|
from django.http import HttpResponse
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
return render_to_response("resource.html", mdict)
Add check to make sure resource existsfrom django.http import Http404
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
try:
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
except Resource.DoesNotExist:
raise Http404("No resources with that primary key were found.")
return render_to_response("resource.html", mdict)
|
<commit_before>from django.http import HttpResponse
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
return render_to_response("resource.html", mdict)
<commit_msg>Add check to make sure resource exists<commit_after>from django.http import Http404
from django.shortcuts import render_to_response
from .models import Resource
def index(request):
return render_to_response("index.html")
def allresources(request):
mdict = {
"resources": Resource.objects.all()
}
return render_to_response("resources_main.html", mdict)
def specificresource(request, resource_pk):
try:
mdict = {
"resource": Resource.objects.get(pk=resource_pk)
}
except Resource.DoesNotExist:
raise Http404("No resources with that primary key were found.")
return render_to_response("resource.html", mdict)
|
47150a81ed780f057a643ce33bd88b9ab5546867
|
pronto_praise/pronto_praise/settings/heroku.py
|
pronto_praise/pronto_praise/settings/heroku.py
|
import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
import dj_database_url
from .base import *
# WhiteNoise won't work on Heroku when we set DEBUG = False
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
Set DEBUG = True to make WhiteNoise works
|
Set DEBUG = True to make WhiteNoise works
|
Python
|
mit
|
prontotools/pronto-praise,prontotools/pronto-praise,prontotools/pronto-praise,prontotools/pronto-praise
|
import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
Set DEBUG = True to make WhiteNoise works
|
import dj_database_url
from .base import *
# WhiteNoise won't work on Heroku when we set DEBUG = False
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
<commit_before>import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
<commit_msg>Set DEBUG = True to make WhiteNoise works<commit_after>
|
import dj_database_url
from .base import *
# WhiteNoise won't work on Heroku when we set DEBUG = False
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
Set DEBUG = True to make WhiteNoise worksimport dj_database_url
from .base import *
# WhiteNoise won't work on Heroku when we set DEBUG = False
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
<commit_before>import dj_database_url
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
<commit_msg>Set DEBUG = True to make WhiteNoise works<commit_after>import dj_database_url
from .base import *
# WhiteNoise won't work on Heroku when we set DEBUG = False
DEBUG = True
ALLOWED_HOSTS = ['*']
MIDDLEWARE = MIDDLEWARE + [
'whitenoise.middleware.WhiteNoiseMiddleware',
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
DATABASES['default'] = dj_database_url.config()
|
4fecbff12c4ebcd63ca2d43e608da95758909b46
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
from app import models
|
Add import models to app
|
Add import models to app
|
Python
|
mit
|
mdsrosa/routes_api_python
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
Add import models to app
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
from app import models
|
<commit_before>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
<commit_msg>Add import models to app<commit_after>
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
from app import models
|
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
Add import models to appfrom flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
from app import models
|
<commit_before>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
<commit_msg>Add import models to app<commit_after>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# load configuration file
app.config.from_object('config')
# database initialization
db = SQLAlchemy(app)
from app import models
|
b3adf26f8b7353d3b0581cdca533eb03ee24b207
|
bin/verify_cached_graphs.py
|
bin/verify_cached_graphs.py
|
#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
|
#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
# Fix problem.
flow.set_cached_graph(graph, ignore_balances)
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
|
Fix cached graph out-of-sync when checking.
|
Fix cached graph out-of-sync when checking.
|
Python
|
agpl-3.0
|
rfugger/villagescc,rfugger/villagescc,rfugger/villagescc,rfugger/villagescc
|
#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
Fix cached graph out-of-sync when checking.
|
#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
# Fix problem.
flow.set_cached_graph(graph, ignore_balances)
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
<commit_msg>Fix cached graph out-of-sync when checking.<commit_after>
|
#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
# Fix problem.
flow.set_cached_graph(graph, ignore_balances)
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
|
#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
Fix cached graph out-of-sync when checking.#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
# Fix problem.
flow.set_cached_graph(graph, ignore_balances)
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
<commit_msg>Fix cached graph out-of-sync when checking.<commit_after>#!/usr/bin/env python
import sys
from pprint import pprint as pp
from cc.payment import flow
def verify():
for ignore_balances in (True, False):
graph = flow.build_graph(ignore_balances)
cached = flow.get_cached_graph(ignore_balances)
if not cached:
flow.set_cached_graph(graph, ignore_balances)
continue
diff = compare(cached, graph)
if diff:
# Fix problem.
flow.set_cached_graph(graph, ignore_balances)
print "Ignore balances: %s" % ignore_balances
pp(diff)
return False
return True
def compare(g1, g2):
e1 = set(normalize(g1.edges(data=True)))
e2 = set(normalize(g2.edges(data=True)))
return e1.symmetric_difference(e2)
def normalize(edge_list):
return ((src, dest, data['capacity'], data['weight'], data['creditline_id'])
for src, dest, data in edge_list)
if __name__ == '__main__':
if verify():
print 'OK.'
sys.exit(0)
else:
print 'Mismatch.'
sys.exit(1)
|
ea6a8de791bf200da2fe5e54a9f9ca68314f3489
|
forum/admin.py
|
forum/admin.py
|
from django.contrib import admin
from forum.models import Category, Forum, Thread
admin.site.register(Category)
admin.site.register(Forum)
admin.site.register(Thread)
|
from django.contrib import admin
from forum.models import Category, Forum, Thread
class ForumInline(admin.StackedInline):
model = Forum
class CategoryAdmin(admin.ModelAdmin):
inlines = [ForumInline]
admin.site.register(Category, CategoryAdmin)
admin.site.register(Thread)
|
Modify forums directly in categories.
|
Modify forums directly in categories.
|
Python
|
mit
|
xfix/NextBoard
|
from django.contrib import admin
from forum.models import Category, Forum, Thread
admin.site.register(Category)
admin.site.register(Forum)
admin.site.register(Thread)
Modify forums directly in categories.
|
from django.contrib import admin
from forum.models import Category, Forum, Thread
class ForumInline(admin.StackedInline):
model = Forum
class CategoryAdmin(admin.ModelAdmin):
inlines = [ForumInline]
admin.site.register(Category, CategoryAdmin)
admin.site.register(Thread)
|
<commit_before>from django.contrib import admin
from forum.models import Category, Forum, Thread
admin.site.register(Category)
admin.site.register(Forum)
admin.site.register(Thread)
<commit_msg>Modify forums directly in categories.<commit_after>
|
from django.contrib import admin
from forum.models import Category, Forum, Thread
class ForumInline(admin.StackedInline):
model = Forum
class CategoryAdmin(admin.ModelAdmin):
inlines = [ForumInline]
admin.site.register(Category, CategoryAdmin)
admin.site.register(Thread)
|
from django.contrib import admin
from forum.models import Category, Forum, Thread
admin.site.register(Category)
admin.site.register(Forum)
admin.site.register(Thread)
Modify forums directly in categories.from django.contrib import admin
from forum.models import Category, Forum, Thread
class ForumInline(admin.StackedInline):
model = Forum
class CategoryAdmin(admin.ModelAdmin):
inlines = [ForumInline]
admin.site.register(Category, CategoryAdmin)
admin.site.register(Thread)
|
<commit_before>from django.contrib import admin
from forum.models import Category, Forum, Thread
admin.site.register(Category)
admin.site.register(Forum)
admin.site.register(Thread)
<commit_msg>Modify forums directly in categories.<commit_after>from django.contrib import admin
from forum.models import Category, Forum, Thread
class ForumInline(admin.StackedInline):
model = Forum
class CategoryAdmin(admin.ModelAdmin):
inlines = [ForumInline]
admin.site.register(Category, CategoryAdmin)
admin.site.register(Thread)
|
371df3363677118d59315e66523aefb081c67282
|
astroML/plotting/settings.py
|
astroML/plotting/settings.py
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
|
Update the mpl rcparams for mpl 2.0+
|
Update the mpl rcparams for mpl 2.0+
|
Python
|
bsd-2-clause
|
astroML/astroML
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
Update the mpl rcparams for mpl 2.0+
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
|
<commit_before>def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
<commit_msg>Update the mpl rcparams for mpl 2.0+<commit_after>
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
|
def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
Update the mpl rcparams for mpl 2.0+def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
|
<commit_before>def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
<commit_msg>Update the mpl rcparams for mpl 2.0+<commit_after>def setup_text_plots(fontsize=8, usetex=True):
"""
This function adjusts matplotlib settings so that all figures in the
textbook have a uniform format and look.
"""
import matplotlib
from distutils.version import LooseVersion
matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
matplotlib.rc('axes', titlesize=fontsize)
matplotlib.rc('axes', labelsize=fontsize)
matplotlib.rc('xtick', labelsize=fontsize)
matplotlib.rc('ytick', labelsize=fontsize)
matplotlib.rc('text', usetex=usetex)
matplotlib.rc('font', size=fontsize, family='serif',
style='normal', variant='normal',
stretch='normal', weight='normal')
matplotlib.rc('patch', force_edgecolor=True)
if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
matplotlib.rc('_internal', classic_mode=True)
else:
# New in mpl 3.1
matplotlib.rc('scatter.edgecolors', 'b')
matplotlib.rc('grid', linestyle=':')
|
261393eb46cdc082b60d9ea11ec862f508632ad2
|
audit_log/models/__init__.py
|
audit_log/models/__init__.py
|
from django.db.models import Model
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
|
from django.db.models import Model, SET_NULL
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(on_delete=SET_NULL, null=True, verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(on_delete=SET_NULL, null=True, verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
|
Add mandatory `on_delete` and allow nulls
|
Add mandatory `on_delete` and allow nulls
I assume it's better to allow nulls than to have auth stamped models
disappear with deleted users.
|
Python
|
bsd-3-clause
|
Atomidata/django-audit-log,Atomidata/django-audit-log
|
from django.db.models import Model
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
Add mandatory `on_delete` and allow nulls
I assume it's better to allow nulls than to have auth stamped models
disappear with deleted users.
|
from django.db.models import Model, SET_NULL
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(on_delete=SET_NULL, null=True, verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(on_delete=SET_NULL, null=True, verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
|
<commit_before>from django.db.models import Model
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
<commit_msg>Add mandatory `on_delete` and allow nulls
I assume it's better to allow nulls than to have auth stamped models
disappear with deleted users.<commit_after>
|
from django.db.models import Model, SET_NULL
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(on_delete=SET_NULL, null=True, verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(on_delete=SET_NULL, null=True, verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
|
from django.db.models import Model
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
Add mandatory `on_delete` and allow nulls
I assume it's better to allow nulls than to have auth stamped models
disappear with deleted users.from django.db.models import Model, SET_NULL
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(on_delete=SET_NULL, null=True, verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(on_delete=SET_NULL, null=True, verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
|
<commit_before>from django.db.models import Model
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
<commit_msg>Add mandatory `on_delete` and allow nulls
I assume it's better to allow nulls than to have auth stamped models
disappear with deleted users.<commit_after>from django.db.models import Model, SET_NULL
from django.utils.translation import ugettext_lazy as _
from audit_log.models.fields import CreatingUserField, CreatingSessionKeyField, LastUserField, LastSessionKeyField
class AuthStampedModel(Model):
"""
An abstract base class model that provides auth and session information
fields.
"""
created_by = CreatingUserField(on_delete=SET_NULL, null=True, verbose_name = _("created by"), related_name = "created_%(app_label)s_%(class)s_set")
created_with_session_key = CreatingSessionKeyField(_("created with session key"))
modified_by = LastUserField(on_delete=SET_NULL, null=True, verbose_name = _("modified by"), related_name = "modified_%(app_label)s_%(class)s_set")
modified_with_session_key = LastSessionKeyField(_("modified with session key"))
class Meta:
abstract = True
|
bf70f8e3235c140589e9b0110b34da8427ab409b
|
child_sync_typo3/wizard/delegate_child_wizard.py
|
child_sync_typo3/wizard/delegate_child_wizard.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Fix res returned on delegate
|
Fix res returned on delegate
|
Python
|
agpl-3.0
|
MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
Fix res returned on delegate
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
<commit_msg>Fix res returned on delegate<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
Fix res returned on delegate# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context)
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
<commit_msg>Fix res returned on delegate<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <david@coninckx.com>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class delegate_child_wizard(orm.TransientModel):
_inherit = 'delegate.child.wizard'
def delegate(self, cr, uid, ids, context=None):
child_ids = self._default_child_ids(cr, uid, context)
child_obj = self.pool.get('compassion.child')
typo3_to_remove_ids = list()
for child in child_obj.browse(cr, uid, child_ids, context):
if (child.state == 'I'):
typo3_to_remove_ids.append(child.id)
if typo3_to_remove_ids:
res = child_obj.child_remove_from_typo3(
cr, uid, typo3_to_remove_ids, context)
res = super(delegate_child_wizard, self).delegate(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
c845570d8ce6217f5943d538700e207f0221841e
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="",
license=LICENSE,
packages=PACKAGES
)
|
# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="https://github.com/cjdrake/pyeda",
license=LICENSE,
packages=PACKAGES
)
|
Add github as project URL
|
Add github as project URL
|
Python
|
bsd-2-clause
|
karissa/pyeda,karissa/pyeda,cjdrake/pyeda,cjdrake/pyeda,pombredanne/pyeda,GtTmy/pyeda,GtTmy/pyeda,pombredanne/pyeda,cjdrake/pyeda,sschnug/pyeda,sschnug/pyeda,karissa/pyeda,sschnug/pyeda,pombredanne/pyeda,GtTmy/pyeda
|
# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="",
license=LICENSE,
packages=PACKAGES
)
Add github as project URL
|
# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="https://github.com/cjdrake/pyeda",
license=LICENSE,
packages=PACKAGES
)
|
<commit_before># -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="",
license=LICENSE,
packages=PACKAGES
)
<commit_msg>Add github as project URL<commit_after>
|
# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="https://github.com/cjdrake/pyeda",
license=LICENSE,
packages=PACKAGES
)
|
# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="",
license=LICENSE,
packages=PACKAGES
)
Add github as project URL# -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="https://github.com/cjdrake/pyeda",
license=LICENSE,
packages=PACKAGES
)
|
<commit_before># -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="",
license=LICENSE,
packages=PACKAGES
)
<commit_msg>Add github as project URL<commit_after># -*- coding: utf-8 -*-
__copyright__ = "Copyright (c) 2012, Chris Drake"
__license__ = "All rights reserved."
# standard library
from distutils.core import setup
# pyeda
from pyeda import __version__
with open("README.rst") as fin:
README = fin.read()
with open("LICENSE") as fin:
LICENSE = fin.read()
PACKAGES = ["pyeda"]
setup(
name="pyeda",
version=__version__,
description="Python Electronic Design Automation",
long_description=README,
author="Chris Drake",
author_email="cjdrake AT gmail DOT com",
url="https://github.com/cjdrake/pyeda",
license=LICENSE,
packages=PACKAGES
)
|
f39a3934e4be271f2cc061e44bee34c6d6cb4126
|
setup.py
|
setup.py
|
from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
|
from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
download_url='https://github.com/felipeochoa/minecart/tarball/0.3.0',
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
|
Add a download url for the tarball from github
|
Add a download url for the tarball from github
|
Python
|
mit
|
felipeochoa/minecart
|
from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
Add a download url for the tarball from github
|
from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
download_url='https://github.com/felipeochoa/minecart/tarball/0.3.0',
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
|
<commit_before>from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
<commit_msg>Add a download url for the tarball from github<commit_after>
|
from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
download_url='https://github.com/felipeochoa/minecart/tarball/0.3.0',
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
|
from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
Add a download url for the tarball from githubfrom os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
download_url='https://github.com/felipeochoa/minecart/tarball/0.3.0',
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
|
<commit_before>from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
<commit_msg>Add a download url for the tarball from github<commit_after>from os import path
from setuptools import setup
README = path.join(path.dirname(path.abspath(__file__)), "README.rst")
setup(
name="minecart",
version="0.3.0",
description=("Simple, Pythonic extraction of images, text, and shapes "
"from PDFs"),
long_description=open(README).read(),
author="Felipe Ochoa",
author_email="find me through Github",
url="https://github.com/felipeochoa/minecart",
download_url='https://github.com/felipeochoa/minecart/tarball/0.3.0',
license="MIT",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
],
keywords='pdf pdfminer extract mining images',
install_requires=['pdfminer3k', 'six'],
extras_require={
'PIL': ['Pillow'],
},
packages=["minecart"],
)
|
06d46159c2742c760b8cd589bf715a96805b870b
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
zip_safe=False,
)
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if not IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
test_suite='cnxepub.tests',
zip_safe=False,
)
|
Add mock as a test dependency if running as python2
|
Add mock as a test dependency if running as python2
Test dependencies only get used if running as python setup.py test, so
need to add test_suite
|
Python
|
agpl-3.0
|
Connexions/cnx-epub,Connexions/cnx-epub,Connexions/cnx-epub
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
zip_safe=False,
)
Add mock as a test dependency if running as python2
Test dependencies only get used if running as python setup.py test, so
need to add test_suite
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if not IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
test_suite='cnxepub.tests',
zip_safe=False,
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
zip_safe=False,
)
<commit_msg>Add mock as a test dependency if running as python2
Test dependencies only get used if running as python setup.py test, so
need to add test_suite<commit_after>
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if not IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
test_suite='cnxepub.tests',
zip_safe=False,
)
|
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
zip_safe=False,
)
Add mock as a test dependency if running as python2
Test dependencies only get used if running as python setup.py test, so
need to add test_suite# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if not IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
test_suite='cnxepub.tests',
zip_safe=False,
)
|
<commit_before># -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
zip_safe=False,
)
<commit_msg>Add mock as a test dependency if running as python2
Test dependencies only get used if running as python setup.py test, so
need to add test_suite<commit_after># -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
IS_PY3 = sys.version_info > (3,)
install_requires = [
'jinja2',
'lxml',
]
tests_require = [
]
extras_require = {
'test': tests_require,
}
description = "Library for building and paring Connexions' EPUBs."
if not IS_PY3:
tests_require.append('mock')
setup(
name='cnx-epub',
version='0.1',
author='Connexions team',
author_email='info@cnx.org',
url="https://github.com/connexions/cnx-epub",
license='LGPL, See also LICENSE.txt',
description=description,
install_requires=install_requires,
tests_require=tests_require,
extras_require=extras_require,
packages=find_packages(),
include_package_data=False,
entry_points="""\
[console_scripts]
""",
test_suite='cnxepub.tests',
zip_safe=False,
)
|
b2a064000a79151c3e9bda06e970bc8208cce330
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at <a href="http://packages.python.org/django-auth-ldap/">http://packages.python.org/django-auth-ldap/</a>.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at http://packages.python.org/django-auth-ldap/.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
|
Fix url in distutils description.
|
Fix url in distutils description.
|
Python
|
bsd-2-clause
|
theatlantic/django-auth-ldap,DheerendraRathor/django-auth-ldap-ng
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at <a href="http://packages.python.org/django-auth-ldap/">http://packages.python.org/django-auth-ldap/</a>.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
Fix url in distutils description.
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at http://packages.python.org/django-auth-ldap/.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at <a href="http://packages.python.org/django-auth-ldap/">http://packages.python.org/django-auth-ldap/</a>.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
<commit_msg>Fix url in distutils description.<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at http://packages.python.org/django-auth-ldap/.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at <a href="http://packages.python.org/django-auth-ldap/">http://packages.python.org/django-auth-ldap/</a>.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
Fix url in distutils description.#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at http://packages.python.org/django-auth-ldap/.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at <a href="http://packages.python.org/django-auth-ldap/">http://packages.python.org/django-auth-ldap/</a>.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
<commit_msg>Fix url in distutils description.<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(
name="django-auth-ldap",
version="1.0b7",
description="Django LDAP authentication backend",
long_description="""This is a Django authentication backend that authenticates against an LDAP service. Configuration can be as simple as a single distinguished name template, but there are many rich configuration options for working with users, groups, and permissions.
This package requires Python 2.3, Django 1.0, and python-ldap. Documentation can be found at http://packages.python.org/django-auth-ldap/.
""",
url="http://bitbucket.org/psagers/django-auth-ldap/",
author="Peter Sagerson",
author_email="psagers_pypi@ignorare.net",
license="BSD",
packages=["django_auth_ldap"],
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Programming Language :: Python",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords=["django", "ldap", "authentication"],
)
|
53931faf00ee64bec253e1ae9a5c6be66298d379
|
setup.py
|
setup.py
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"sentry_sdk",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
Add sentry_sdk to cxfreeze packages
|
Add sentry_sdk to cxfreeze packages
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
Add sentry_sdk to cxfreeze packages
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"sentry_sdk",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
<commit_before>from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
<commit_msg>Add sentry_sdk to cxfreeze packages<commit_after>
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"sentry_sdk",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
Add sentry_sdk to cxfreeze packagesfrom cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"sentry_sdk",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
<commit_before>from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
<commit_msg>Add sentry_sdk to cxfreeze packages<commit_after>from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"sentry_sdk",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
b589feb587a7fa693d77f35395485f5b673e9d95
|
celery/backends/__init__.py
|
celery/backends/__init__.py
|
"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``celery.backends.`` will be prepended to the name. e.g.
``database`` becomes ``celery.backends.database``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
|
"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``"celery.backends."`` will be prepended to the name. e.g.
``"database"`` becomes ``"celery.backends.database"``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
|
Add strings in weak quotes
|
Add strings in weak quotes
|
Python
|
bsd-3-clause
|
frac/celery,cbrepo/celery,WoLpH/celery,WoLpH/celery,cbrepo/celery,ask/celery,ask/celery,mitsuhiko/celery,frac/celery,mitsuhiko/celery
|
"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``celery.backends.`` will be prepended to the name. e.g.
``database`` becomes ``celery.backends.database``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
Add strings in weak quotes
|
"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``"celery.backends."`` will be prepended to the name. e.g.
``"database"`` becomes ``"celery.backends.database"``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
|
<commit_before>"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``celery.backends.`` will be prepended to the name. e.g.
``database`` becomes ``celery.backends.database``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
<commit_msg>Add strings in weak quotes<commit_after>
|
"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``"celery.backends."`` will be prepended to the name. e.g.
``"database"`` becomes ``"celery.backends.database"``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
|
"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``celery.backends.`` will be prepended to the name. e.g.
``database`` becomes ``celery.backends.database``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
Add strings in weak quotes"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``"celery.backends."`` will be prepended to the name. e.g.
``"database"`` becomes ``"celery.backends.database"``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
|
<commit_before>"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``celery.backends.`` will be prepended to the name. e.g.
``database`` becomes ``celery.backends.database``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
<commit_msg>Add strings in weak quotes<commit_after>"""celery.backends"""
from functools import partial
from django.conf import settings
import sys
DEFAULT_BACKEND = "database"
CELERY_BACKEND = getattr(settings, "CELERY_BACKEND", DEFAULT_BACKEND)
def get_backend_cls(backend):
"""Get backend class by name.
If the name does not include "``.``" (is not fully qualified),
``"celery.backends."`` will be prepended to the name. e.g.
``"database"`` becomes ``"celery.backends.database"``.
"""
if backend.find(".") == -1:
backend = "celery.backends.%s" % backend
__import__(backend)
backend_module = sys.modules[backend]
return getattr(backend_module, 'Backend')
"""
.. function:: get_default_backend_cls()
Get the backend class specified in :settings:`CELERY_BACKEND`.
"""
get_default_backend_cls = partial(get_backend_cls, CELERY_BACKEND)
"""
.. class:: DefaultBackend
The backend class specified in :setting:`CELERY_BACKEND`.
"""
DefaultBackend = get_default_backend_cls()
"""
.. data:: default_backend
An instance of :class:`DefaultBackend`.
"""
default_backend = DefaultBackend()
|
a51d2ec96469686e3a2767e29951b8442b17da69
|
test/test_util.py
|
test/test_util.py
|
from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
assert 'hehe' in self.apm._paradigms
|
from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
def test_override(self):
self.apm.paradigms = {'hehe': True}
assert self.apm._paradigms == {'hehe': True}
|
Add tests for APM overrides
|
Add tests for APM overrides
|
Python
|
mit
|
thiderman/network-kitten
|
from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
assert 'hehe' in self.apm._paradigms
Add tests for APM overrides
|
from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
def test_override(self):
self.apm.paradigms = {'hehe': True}
assert self.apm._paradigms == {'hehe': True}
|
<commit_before>from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
assert 'hehe' in self.apm._paradigms
<commit_msg>Add tests for APM overrides<commit_after>
|
from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
def test_override(self):
self.apm.paradigms = {'hehe': True}
assert self.apm._paradigms == {'hehe': True}
|
from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
assert 'hehe' in self.apm._paradigms
Add tests for APM overridesfrom kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
def test_override(self):
self.apm.paradigms = {'hehe': True}
assert self.apm._paradigms == {'hehe': True}
|
<commit_before>from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
assert 'hehe' in self.apm._paradigms
<commit_msg>Add tests for APM overrides<commit_after>from kitten.util import AutoParadigmMixin
class TestAutoParadigmMixin(object):
def setup_method(self, method):
self.apm = AutoParadigmMixin()
def test_first_load(self):
ret = self.apm.paradigms
assert 'node' in ret
assert 'node' in self.apm._paradigms
def test_second_load(self):
self.apm._paradigms = {'hehe': True}
ret = self.apm.paradigms
assert 'hehe' in ret
def test_override(self):
self.apm.paradigms = {'hehe': True}
assert self.apm._paradigms == {'hehe': True}
|
6cd9c7285d462311580754229d0b85af844dd387
|
test/integration/test_cli.py
|
test/integration/test_cli.py
|
import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
|
import unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
Update integration test with actual...integration test
|
Update integration test with actual...integration test
|
Python
|
apache-2.0
|
Psykar/kubos,kubostech/KubOS,Psykar/kubos,Psykar/kubos,Psykar/kubos,kubostech/KubOS,Psykar/kubos,Psykar/kubos,Psykar/kubos
|
import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
Update integration test with actual...integration test
|
import unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
<commit_msg>Update integration test with actual...integration test<commit_after>
|
import unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
Update integration test with actual...integration testimport unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
class TestCLI(unittest.TestCase):
def test_kubos_installed(self):
self.assertEqual('foo'.upper(), 'FOO')
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
s = 'hello world'
self.assertEqual(s.split(), ['hello', 'world'])
# check that s.split fails when the separator is not a string
with self.assertRaises(TypeError):
s.split(2)
if __name__ == '__main__':
unittest.main()
<commit_msg>Update integration test with actual...integration test<commit_after>import unittest
import re
import subprocess
class TestCLI(unittest.TestCase):
def test_latest_kubos_installed(self):
bashCommand = "vagrant ssh -c 'kubos update'"
process = subprocess.Popen(bashCommand.split())
output, error = process.communicate()
regex = re.compile(r"All up to date!")
self.assertTrue(regex.search( output ))
if __name__ == '__main__':
unittest.main()
|
ec7da0420a83223c0f636ddb9a7ebfcfa943f2da
|
test/mock_settings_device.py
|
test/mock_settings_device.py
|
PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
|
PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def addSetting(self, path, value, _min, _max, silent=False, callback=None):
from mock_dbus_monitor import MockImportItem
return MockImportItem(value)
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
|
Add addSettings call also to the testing code.
|
Add addSettings call also to the testing code.
|
Python
|
mit
|
victronenergy/velib_python
|
PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
Add addSettings call also to the testing code.
|
PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def addSetting(self, path, value, _min, _max, silent=False, callback=None):
from mock_dbus_monitor import MockImportItem
return MockImportItem(value)
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
|
<commit_before>PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
<commit_msg>Add addSettings call also to the testing code.<commit_after>
|
PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def addSetting(self, path, value, _min, _max, silent=False, callback=None):
from mock_dbus_monitor import MockImportItem
return MockImportItem(value)
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
|
PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
Add addSettings call also to the testing code.PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def addSetting(self, path, value, _min, _max, silent=False, callback=None):
from mock_dbus_monitor import MockImportItem
return MockImportItem(value)
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
|
<commit_before>PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
<commit_msg>Add addSettings call also to the testing code.<commit_after>PATH = 0
VALUE = 1
MINIMUM = 2
MAXIMUM = 3
# Simulates the SettingsSevice object without using the D-Bus (intended for unit tests). Values passed to
# __setitem__ (or the [] operator) will be stored in memory for later retrieval by __getitem__.
class MockSettingsDevice(object):
def __init__(self, supported_settings, event_callback, name='com.victronenergy.settings', timeout=0):
self._dbus_name = name
self._settings = supported_settings
self._event_callback = event_callback
def addSetting(self, path, value, _min, _max, silent=False, callback=None):
from mock_dbus_monitor import MockImportItem
return MockImportItem(value)
def get_short_name(self, path):
for k,v in self._settings.items():
if v[PATH] == path:
return k
return None
def __getitem__(self, setting):
return self._settings[setting][VALUE]
def __setitem__(self, setting, new_value):
s = self._settings.get(setting, None)
if s is None:
raise Exception('setting not found')
old_value = s[VALUE]
if old_value == new_value:
return
s[VALUE] = new_value
if self._event_callback is not None:
self._event_callback(setting, old_value, new_value)
|
6dfc6cffb2594b420843ce7021988f78de2b4faf
|
estmator_project/estmator_project/test.py
|
estmator_project/estmator_project/test.py
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
Test commit for travis setup
|
Test commit for travis setup
|
Python
|
mit
|
Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
Test commit for travis setup
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
<commit_before>from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
<commit_msg>Test commit for travis setup<commit_after>
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
Test commit for travis setupfrom test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
<commit_before>from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
pass
<commit_msg>Test commit for travis setup<commit_after>from test_plus.test import TestCase as PlusTestCase
class TestCase(PlusTestCase):
"""Sublcassed TestCase for project."""
pass
|
8c55bdc78b3ae2c52826740ab049a2bab5ca1fdd
|
src/nodeconductor_saltstack/exchange/extension.py
|
src/nodeconductor_saltstack/exchange/extension.py
|
from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
|
from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'exchange-sync-quotas': {
'task': 'nodeconductor.exchange.sync_quotas',
'schedule': timedelta(hours=1),
'args': ()
},
}
|
Add sync quota task to celerybeat
|
Add sync quota task to celerybeat
- nc-1009
|
Python
|
mit
|
opennode/nodeconductor-saltstack
|
from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
Add sync quota task to celerybeat
- nc-1009
|
from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'exchange-sync-quotas': {
'task': 'nodeconductor.exchange.sync_quotas',
'schedule': timedelta(hours=1),
'args': ()
},
}
|
<commit_before>from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
<commit_msg>Add sync quota task to celerybeat
- nc-1009<commit_after>
|
from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'exchange-sync-quotas': {
'task': 'nodeconductor.exchange.sync_quotas',
'schedule': timedelta(hours=1),
'args': ()
},
}
|
from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
Add sync quota task to celerybeat
- nc-1009from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'exchange-sync-quotas': {
'task': 'nodeconductor.exchange.sync_quotas',
'schedule': timedelta(hours=1),
'args': ()
},
}
|
<commit_before>from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
<commit_msg>Add sync quota task to celerybeat
- nc-1009<commit_after>from nodeconductor.core import NodeConductorExtension
class ExchangeExtension(NodeConductorExtension):
@staticmethod
def django_app():
return 'nodeconductor_saltstack.exchange'
@staticmethod
def rest_urls():
from .urls import register_in
return register_in
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'exchange-sync-quotas': {
'task': 'nodeconductor.exchange.sync_quotas',
'schedule': timedelta(hours=1),
'args': ()
},
}
|
3c01c07e13dfd79a76408926b13848417a3cfb3e
|
tests/test_requesthandler.py
|
tests/test_requesthandler.py
|
from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'))
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'))
r = self.request(q)
self.assertEqual(r, [])
|
from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'), {}, [])
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
|
Fix requesthandler tests (new version of datamodel).
|
Fix requesthandler tests (new version of datamodel).
|
Python
|
mit
|
iScienceLuvr/PPP-NaturalMath,ProjetPP/PPP-NaturalMath,iScienceLuvr/ckse,iScienceLuvr/PPP-NaturalMath,ProjetPP/PPP-NaturalMath,iScienceLuvr/ckse
|
from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'))
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'))
r = self.request(q)
self.assertEqual(r, [])
Fix requesthandler tests (new version of datamodel).
|
from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'), {}, [])
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
|
<commit_before>from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'))
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'))
r = self.request(q)
self.assertEqual(r, [])
<commit_msg>Fix requesthandler tests (new version of datamodel).<commit_after>
|
from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'), {}, [])
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
|
from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'))
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'))
r = self.request(q)
self.assertEqual(r, [])
Fix requesthandler tests (new version of datamodel).from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'), {}, [])
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
|
<commit_before>from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'))
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'))
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'))
r = self.request(q)
self.assertEqual(r, [])
<commit_msg>Fix requesthandler tests (new version of datamodel).<commit_after>from ppp_datamodel import Sentence, Resource
from ppp_datamodel.communication import Request, TraceItem, Response
from ppp_libmodule.tests import PPPTestCase
from ppp_natural_math import app
class TestFollowing(PPPTestCase(app)):
config_var = 'PPP_NATURALMATH'
config = ''
def testBasics(self):
q = Request('1', 'en', Resource('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('integral of x^y'), {}, [])
r = self.request(q)
self.assertEqual(len(r), 1, r)
self.assertEqual(r[0].tree, Sentence('Integrate(x^y, y)'))
q = Request('1', 'en', Sentence('x'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
q = Request('1', 'en', Sentence('*$$!-|'), {}, [])
r = self.request(q)
self.assertEqual(r, [])
|
e4b1fcf017494c22744f44bd93381b8063b30e34
|
eadred/tests/test_generate.py
|
eadred/tests/test_generate.py
|
import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
|
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
Fix test to catch options issues
|
Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.
|
Python
|
bsd-3-clause
|
willkg/django-eadred
|
import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.
|
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
<commit_before>import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
<commit_msg>Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.<commit_after>
|
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
<commit_before>import unittest
from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.execute()
assert sampledata.called == True
<commit_msg>Fix test to catch options issues
The test should now catch the issue that was fixed in 79a453f.<commit_after>from eadred.management.commands import generatedata
def test_generatedata():
"""Basic test to make sure function gets called."""
from testproject.testapp import sampledata
assert sampledata.called == False
cmd = generatedata.Command()
cmd.run_from_argv(['manage.py', ''])
assert sampledata.called == True
|
62494cd7125d498d8de058ab3ebe556cd9686f6e
|
calvin/runtime/north/plugins/coders/messages/msgpack_coder.py
|
calvin/runtime/north/plugins/coders/messages/msgpack_coder.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
|
Use umsgpack package for msgpack coder
|
coder/msgpack: Use umsgpack package for msgpack coder
|
Python
|
apache-2.0
|
EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
coder/msgpack: Use umsgpack package for msgpack coder
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
<commit_msg>coder/msgpack: Use umsgpack package for msgpack coder<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
coder/msgpack: Use umsgpack package for msgpack coder# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
<commit_msg>coder/msgpack: Use umsgpack package for msgpack coder<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
|
8cd859609a8a58474ff152d9adbb968ab3cdffa0
|
gaphor/diagram/diagramtools/tests/test_txtool.py
|
gaphor/diagram/diagramtools/tests/test_txtool.py
|
from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
|
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
Fix tests for tx tool
|
Fix tests for tx tool
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
Fix tests for tx tool
|
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
<commit_before>from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
<commit_msg>Fix tests for tx tool<commit_after>
|
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
Fix tests for tx toolfrom gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
<commit_before>from unittest.mock import Mock
from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import TxData, on_begin, transactional_tool
from gaphor.transaction import TransactionBegin
def xtest_start_tx_on_begin(view, event_manager):
event_manager.handle = Mock()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
assert event_manager.handle.called
assert isinstance(event_manager.handle.call_args.args[0], TransactionBegin)
<commit_msg>Fix tests for tx tool<commit_after>from gi.repository import Gtk
from gaphor.diagram.diagramtools.txtool import (
TxData,
on_begin,
on_end,
transactional_tool,
)
from gaphor.transaction import TransactionBegin
class MockEventManager:
def __init__(self):
self.events = []
def handle(self, event):
self.events.append(event)
def test_start_tx_on_begin(view):
event_manager = MockEventManager()
tx_data = TxData(event_manager)
tool = transactional_tool(Gtk.GestureDrag.new(view), event_manager)
on_begin(tool, None, tx_data)
assert tx_data.tx
on_end(tool, None, tx_data)
assert event_manager.events
assert isinstance(event_manager.events[0], TransactionBegin)
|
9ef44fb81e8c9fef40f5b21e648c2500e01169f4
|
medical_patient_ethnicity/models/medical_patient_ethnicity.py
|
medical_patient_ethnicity/models/medical_patient_ethnicity.py
|
# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char()
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
|
# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
|
Add required to ethnicity code
|
Add required to ethnicity code
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,ShaheenHossain/eagle-medical,ShaheenHossain/eagle-medical,laslabs/vertical-medical
|
# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char()
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
Add required to ethnicity code
|
# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
|
<commit_before># -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char()
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
<commit_msg>Add required to ethnicity code<commit_after>
|
# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
|
# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char()
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
Add required to ethnicity code# -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
|
<commit_before># -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char()
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
<commit_msg>Add required to ethnicity code<commit_after># -*- coding: utf-8 -*-
# #############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# #############################################################################
from openerp import models, fields
class MedicalPatientEthnicity(models.Model):
_name = 'medical.patient.ethnicity'
notes = fields.Char()
code = fields.Char(required=True, )
name = fields.Char(required=True, translate=True)
_sql_constraints = [
('name_uniq', 'UNIQUE(name)', 'Ethnicity name must be unique!'),
('code_uniq', 'UNIQUE(code)', 'Ethnicity code must be unique!'),
]
|
63c300670a8406ac403841630aded1a810d929fd
|
lib/subprocess_tee/test/test_rich.py
|
lib/subprocess_tee/test/test_rich.py
|
"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
|
"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
def test_rich_console_ex_ansi() -> None:
"""Validate that ANSI sent to sys.stdout does not become garbage in record."""
print()
console = ConsoleEx(force_terminal=True, record=True, redirect=True)
console.print("[green]this from Console.print()[/green]", style="red")
proc = run(r'echo -e "\033[31mred\033[0m"')
assert proc.returncode == 0
assert proc.stdout == "\x1b[31mred\x1b[0m\n"
# validate that what rich recorded is the same as what the subprocess produced
text = console.export_text(clear=False)
assert "\x1b[31mred\x1b[0m\n" in text
# validate that html export also contains at least the "red" text
html = console.export_html(clear=False)
assert "red" in html
|
Add testing of rich html_export
|
Add testing of rich html_export
|
Python
|
mit
|
pycontribs/subprocess-tee
|
"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
Add testing of rich html_export
|
"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
def test_rich_console_ex_ansi() -> None:
"""Validate that ANSI sent to sys.stdout does not become garbage in record."""
print()
console = ConsoleEx(force_terminal=True, record=True, redirect=True)
console.print("[green]this from Console.print()[/green]", style="red")
proc = run(r'echo -e "\033[31mred\033[0m"')
assert proc.returncode == 0
assert proc.stdout == "\x1b[31mred\x1b[0m\n"
# validate that what rich recorded is the same as what the subprocess produced
text = console.export_text(clear=False)
assert "\x1b[31mred\x1b[0m\n" in text
# validate that html export also contains at least the "red" text
html = console.export_html(clear=False)
assert "red" in html
|
<commit_before>"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
<commit_msg>Add testing of rich html_export<commit_after>
|
"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
def test_rich_console_ex_ansi() -> None:
"""Validate that ANSI sent to sys.stdout does not become garbage in record."""
print()
console = ConsoleEx(force_terminal=True, record=True, redirect=True)
console.print("[green]this from Console.print()[/green]", style="red")
proc = run(r'echo -e "\033[31mred\033[0m"')
assert proc.returncode == 0
assert proc.stdout == "\x1b[31mred\x1b[0m\n"
# validate that what rich recorded is the same as what the subprocess produced
text = console.export_text(clear=False)
assert "\x1b[31mred\x1b[0m\n" in text
# validate that html export also contains at least the "red" text
html = console.export_html(clear=False)
assert "red" in html
|
"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
Add testing of rich html_export"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
def test_rich_console_ex_ansi() -> None:
"""Validate that ANSI sent to sys.stdout does not become garbage in record."""
print()
console = ConsoleEx(force_terminal=True, record=True, redirect=True)
console.print("[green]this from Console.print()[/green]", style="red")
proc = run(r'echo -e "\033[31mred\033[0m"')
assert proc.returncode == 0
assert proc.stdout == "\x1b[31mred\x1b[0m\n"
# validate that what rich recorded is the same as what the subprocess produced
text = console.export_text(clear=False)
assert "\x1b[31mred\x1b[0m\n" in text
# validate that html export also contains at least the "red" text
html = console.export_html(clear=False)
assert "red" in html
|
<commit_before>"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
<commit_msg>Add testing of rich html_export<commit_after>"""Tests for rich module."""
import sys
from subprocess_tee import run
from subprocess_tee.rich import ConsoleEx
def test_rich_console_ex() -> None:
"""Validate that ConsoleEx can capture output from print() calls."""
console = ConsoleEx(record=True, redirect=True)
console.print("alpha")
print("beta")
sys.stdout.write("gamma\n")
sys.stderr.write("delta\n")
proc = run("echo 123")
assert proc.stdout == "123\n"
text = console.export_text()
assert text == "alpha\nbeta\ngamma\ndelta\n123\n"
def test_rich_console_ex_ansi() -> None:
"""Validate that ANSI sent to sys.stdout does not become garbage in record."""
print()
console = ConsoleEx(force_terminal=True, record=True, redirect=True)
console.print("[green]this from Console.print()[/green]", style="red")
proc = run(r'echo -e "\033[31mred\033[0m"')
assert proc.returncode == 0
assert proc.stdout == "\x1b[31mred\x1b[0m\n"
# validate that what rich recorded is the same as what the subprocess produced
text = console.export_text(clear=False)
assert "\x1b[31mred\x1b[0m\n" in text
# validate that html export also contains at least the "red" text
html = console.export_html(clear=False)
assert "red" in html
|
8734b8448edafe10db7380c93039832f8e43624c
|
examples/connect4/connect4.py
|
examples/connect4/connect4.py
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 1
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 3 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 1 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
|
Use players 0 and 1 instead of 1 and 2
|
Use players 0 and 1 instead of 1 and 2
Makes it easier to communicate with ANN, as 1 is truthy and 0 is falsy.
Players are also slightly more clear when Connect4 is printed.
|
Python
|
mit
|
tysonzero/py-ann
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 1
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 3 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
Use players 0 and 1 instead of 1 and 2
Makes it easier to communicate with ANN, as 1 is truthy and 0 is falsy.
Players are also slightly more clear when Connect4 is printed.
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 1 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
|
<commit_before>class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 1
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 3 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
<commit_msg>Use players 0 and 1 instead of 1 and 2
Makes it easier to communicate with ANN, as 1 is truthy and 0 is falsy.
Players are also slightly more clear when Connect4 is printed.<commit_after>
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 1 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
|
class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 1
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 3 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
Use players 0 and 1 instead of 1 and 2
Makes it easier to communicate with ANN, as 1 is truthy and 0 is falsy.
Players are also slightly more clear when Connect4 is printed.class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 1 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
|
<commit_before>class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 1
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 3 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
<commit_msg>Use players 0 and 1 instead of 1 and 2
Makes it easier to communicate with ANN, as 1 is truthy and 0 is falsy.
Players are also slightly more clear when Connect4 is printed.<commit_after>class Connect4(object):
def __init__(self):
self.pieces = [[] for i in xrange(7)]
self.turn = 0
def move(self, column):
self.pieces[column % 7].append(self.turn)
self.turn = 1 - self.turn
def __str__(self):
output = ''
for i in xrange(6):
output += i and '\n ' or ' '
for piece_column in self.pieces:
try:
output += str(piece_column[5 - i]) + ' '
except IndexError:
output += ' '
return output
def start():
pass
|
c7d09823bb75146602146606823f0f7906edbec3
|
babybuddy/urls.py
|
babybuddy/urls.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
Fix linting issue with url line length
|
Fix linting issue with url line length
|
Python
|
bsd-2-clause
|
cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Fix linting issue with url line length
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Fix linting issue with url line length<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Fix linting issue with url line length# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Fix linting issue with url line length<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^login/$', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url('^password_reset/$', auth_views.PasswordResetView.as_view(),
name='password_reset',),
url(r'^$', views.RootRouter.as_view(), name='root-router'),
url(r'^welcome/$', views.Welcome.as_view(), name='welcome'),
url(r'^user/settings/$', views.UserSettings.as_view(),
name='user-settings'),
url(r'', include('api.urls', namespace='api')),
url(r'', include('core.urls')),
url(r'', include('dashboard.urls')),
url(r'', include('reports.urls', namespace='reports')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
cda5fcb56ecdfe5a2f49d0efbf76e853c8c50e6c
|
migration_scripts/0.3/crypto_util.py
|
migration_scripts/0.3/crypto_util.py
|
# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
Load files from absolute paths so this can be run from anywhere
|
Load files from absolute paths so this can be run from anywhere
|
Python
|
agpl-3.0
|
mark-in/securedrop-prov-upstream,mark-in/securedrop-prov-upstream,mark-in/securedrop-prov-upstream,mark-in/securedrop-prov-upstream
|
# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
Load files from absolute paths so this can be run from anywhere
|
# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
<commit_before># -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
<commit_msg>Load files from absolute paths so this can be run from anywhere<commit_after>
|
# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
Load files from absolute paths so this can be run from anywhere# -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
<commit_before># -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import random as badrandom
nouns = file("nouns.txt").read().split('\n')
adjectives = file("adjectives.txt").read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
<commit_msg>Load files from absolute paths so this can be run from anywhere<commit_after># -*- coding: utf-8 -*-
# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
# regenerate journalist designations from soure's filesystem id's.
import os
import random as badrandom
# Find the absolute path relative to this file so this script can be run anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
|
66d70394d22870ee4ff8035342d88a5c65a9f338
|
samsa/utils/delayedconfig.py
|
samsa/utils/delayedconfig.py
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked. '
'Configuring now.',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
|
Make log message a little clearer
|
Make log message a little clearer
|
Python
|
apache-2.0
|
sammerry/pykafka,jofusa/pykafka,aeroevan/pykafka,benauthor/pykafka,sammerry/pykafka,vortec/pykafka,aeroevan/pykafka,vortec/pykafka,sontek/pykafka,sontek/pykafka,thedrow/samsa,wikimedia/operations-debs-python-pykafka,tempbottle/pykafka,benauthor/pykafka,thedrow/samsa,jofusa/pykafka,benauthor/pykafka,wikimedia/operations-debs-python-pykafka,thedrow/samsa,yungchin/pykafka,yungchin/pykafka,fortime/pykafka,wikimedia/operations-debs-python-pykafka,appsoma/pykafka,fortime/pykafka,tempbottle/pykafka,appsoma/pykafka
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
Make log message a little clearer
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging

logger = logging.getLogger(__name__)


def requires_configuration(method):
    """Decorate a method of a :class:`DelayedConfiguration` subclass so the
    instance is configured before the method body runs.

    ``self._configure()`` is triggered at most once per instance; every
    later call goes straight through to the wrapped method.
    """
    @functools.wraps(method)
    def wrapped(self, *args, **kwargs):
        if self._configured:
            return method(self, *args, **kwargs)
        logger.debug('%s requires configuration before %s may be invoked. '
                     'Configuring now.', self, method)
        # Flip the flag *before* configuring so decorated methods invoked
        # from within _configure() itself do not recurse endlessly.
        self._configured = True
        self._configure()
        return method(self, *args, **kwargs)
    return wrapped
class DelayedConfiguration(object):
    """Mixin for objects whose full configuration may arrive only after
    instantiation.

    Concrete subclasses implement :meth:`_configure`, which is invoked
    exactly once per instance to bootstrap its configuration.
    """
    # Flipped to True the first time the instance is configured.
    _configured = False

    def _configure(self):
        # Subclass responsibility: populate configuration state on demand.
        raise NotImplementedError
|
<commit_before>__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
<commit_msg>Make log message a little clearer<commit_after>
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked. '
'Configuring now.',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
Make log message a little clearer__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked. '
'Configuring now.',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
|
<commit_before>__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
<commit_msg>Make log message a little clearer<commit_after>__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import functools
import logging
logger = logging.getLogger(__name__)
def requires_configuration(method):
"""
A method decorator for objects that derive from
:class:`DelayedConfiguration` that ensures methods that require
configuration have the appropriate state before being invoked.
"""
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self._configured:
logger.debug('%s requires configuration before %s may be invoked. '
'Configuring now.',
self, method)
self._configured = True
self._configure()
return method(self, *args, **kwargs)
return wrapped
class DelayedConfiguration(object):
"""
A mixin class for objects that can be instantiated without their full
configuration available.
Subclasses must implement :meth:`_configure`, which will be called once
to bootstrap the configuration for the each instance.
"""
_configured = False
def _configure(self):
raise NotImplementedError
|
da52ad14f23bcecd8d0107742eb8a1d9a447265a
|
opps/views/generic/detail.py
|
opps/views/generic/detail.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
    """Opps detail view: resolves site/channel-aware templates and content."""
    def get_template_names(self):
        """Return candidate template paths, most specific first.

        Order: <domain>/<channel>/<slug>/detail.html, then
        <domain>/<channel>/detail.html, then <domain>/detail.html.
        """
        templates = []
        domain_folder = self.get_template_folder()
        templates.append('{}/{}/{}/detail.html'.format(domain_folder,
                                                       self.long_slug,
                                                       self.slug))
        templates.append('{}/{}/detail.html'.format(domain_folder,
                                                    self.long_slug))
        templates.append('{}/detail.html'.format(domain_folder))
        return templates
    def get_queryset(self):
        """Filter the base queryset by current site, channel and slug.

        Returns None when no channel long-slug can be resolved. Staff users
        bypass the publication filters so they can preview unpublished
        content.
        """
        # Side effects: caches site/slug/long_slug on the view instance for
        # use by get_template_names() and template context.
        self.site = get_current_site(self.request)
        self.slug = self.kwargs.get('slug')
        self.long_slug = self.get_long_slug()
        if not self.long_slug:
            return None
        self.set_channel_rules()
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['channel_long_slug'] = self.long_slug
        filters['slug'] = self.slug
        # Staff users see unpublished/future-dated content (preview mode).
        preview_enabled = self.request.user and self.request.user.is_staff
        if not preview_enabled:
            filters['date_available__lte'] = timezone.now()
            filters['published'] = True
        queryset = super(DetailView, self).get_queryset()
        # _clone() forces a fresh queryset instance — presumably to avoid
        # sharing cached results; TODO confirm it is still required.
        return queryset.filter(**filters)._clone()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
|
Fix pep8, remove blank line on DetailView
|
Fix pep8, remove blank line on DetailView
|
Python
|
mit
|
opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
Fix pep8, remove blank line on DetailView
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
<commit_msg>Fix pep8, remove blank line on DetailView<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
Fix pep8, remove blank line on DetailView#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
<commit_msg>Fix pep8, remove blank line on DetailView<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView as DjangoDetailView
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from opps.views.generic.base import View
class DetailView(View, DjangoDetailView):
def get_template_names(self):
templates = []
domain_folder = self.get_template_folder()
templates.append('{}/{}/{}/detail.html'.format(domain_folder,
self.long_slug,
self.slug))
templates.append('{}/{}/detail.html'.format(domain_folder,
self.long_slug))
templates.append('{}/detail.html'.format(domain_folder))
return templates
def get_queryset(self):
self.site = get_current_site(self.request)
self.slug = self.kwargs.get('slug')
self.long_slug = self.get_long_slug()
if not self.long_slug:
return None
self.set_channel_rules()
filters = {}
filters['site_domain'] = self.site.domain
filters['channel_long_slug'] = self.long_slug
filters['slug'] = self.slug
preview_enabled = self.request.user and self.request.user.is_staff
if not preview_enabled:
filters['date_available__lte'] = timezone.now()
filters['published'] = True
queryset = super(DetailView, self).get_queryset()
return queryset.filter(**filters)._clone()
|
fe6b7b9abb8e9730a3d028850337c047fe6607ea
|
tests/unit/services/user/test_models_full_name.py
|
tests/unit/services/user/test_models_full_name.py
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from testfixtures.user import create_user_with_detail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user_with_detail(first_names=first_names, last_name=last_name)
assert user.detail.full_name == expected
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
import pytest
from byceps.services.user.models.user import User as DbUser
from byceps.services.user.models.detail import UserDetail as DbUserDetail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user(first_names, last_name)
assert user.detail.full_name == expected
def create_user(first_names: str, last_name: str) -> DbUser:
created_at = datetime.utcnow()
screen_name = 'Anyone'
email_address = 'anyone@example.test'
user = DbUser(created_at, screen_name, email_address)
detail = DbUserDetail(user=user)
detail.first_names = first_names
detail.last_name = last_name
return user
|
Create fullname user object locally in test
|
Create fullname user object locally in test
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from testfixtures.user import create_user_with_detail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user_with_detail(first_names=first_names, last_name=last_name)
assert user.detail.full_name == expected
Create fullname user object locally in test
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
import pytest
from byceps.services.user.models.user import User as DbUser
from byceps.services.user.models.detail import UserDetail as DbUserDetail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user(first_names, last_name)
assert user.detail.full_name == expected
def create_user(first_names: str, last_name: str) -> DbUser:
    """Build a transient user with a detail record carrying the given names."""
    user = DbUser(datetime.utcnow(), 'Anyone', 'anyone@example.test')
    detail = DbUserDetail(user=user)
    detail.first_names = first_names
    detail.last_name = last_name
    return user
|
<commit_before>"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from testfixtures.user import create_user_with_detail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user_with_detail(first_names=first_names, last_name=last_name)
assert user.detail.full_name == expected
<commit_msg>Create fullname user object locally in test<commit_after>
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
import pytest
from byceps.services.user.models.user import User as DbUser
from byceps.services.user.models.detail import UserDetail as DbUserDetail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user(first_names, last_name)
assert user.detail.full_name == expected
def create_user(first_names: str, last_name: str) -> DbUser:
created_at = datetime.utcnow()
screen_name = 'Anyone'
email_address = 'anyone@example.test'
user = DbUser(created_at, screen_name, email_address)
detail = DbUserDetail(user=user)
detail.first_names = first_names
detail.last_name = last_name
return user
|
"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from testfixtures.user import create_user_with_detail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user_with_detail(first_names=first_names, last_name=last_name)
assert user.detail.full_name == expected
Create fullname user object locally in test"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
import pytest
from byceps.services.user.models.user import User as DbUser
from byceps.services.user.models.detail import UserDetail as DbUserDetail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user(first_names, last_name)
assert user.detail.full_name == expected
def create_user(first_names: str, last_name: str) -> DbUser:
created_at = datetime.utcnow()
screen_name = 'Anyone'
email_address = 'anyone@example.test'
user = DbUser(created_at, screen_name, email_address)
detail = DbUserDetail(user=user)
detail.first_names = first_names
detail.last_name = last_name
return user
|
<commit_before>"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from testfixtures.user import create_user_with_detail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user_with_detail(first_names=first_names, last_name=last_name)
assert user.detail.full_name == expected
<commit_msg>Create fullname user object locally in test<commit_after>"""
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
import pytest
from byceps.services.user.models.user import User as DbUser
from byceps.services.user.models.detail import UserDetail as DbUserDetail
@pytest.mark.parametrize(
'first_names, last_name, expected',
[
(None, None , None ),
('Giesbert Z.', None , 'Giesbert Z.' ),
(None, 'Blümli', 'Blümli' ),
('Giesbert Z.', 'Blümli', 'Giesbert Z. Blümli'),
],
)
def test_full_name(first_names, last_name, expected):
user = create_user(first_names, last_name)
assert user.detail.full_name == expected
def create_user(first_names: str, last_name: str) -> DbUser:
created_at = datetime.utcnow()
screen_name = 'Anyone'
email_address = 'anyone@example.test'
user = DbUser(created_at, screen_name, email_address)
detail = DbUserDetail(user=user)
detail.first_names = first_names
detail.last_name = last_name
return user
|
f9e1c8a536c0212414047b941f502bfbee92be92
|
l10n_br_fiscal/wizards/document_cancel_wizard.py
|
l10n_br_fiscal/wizards/document_cancel_wizard.py
|
# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
class DocumentCancelWizard(models.TransientModel):
    """Wizard collecting a justification to cancel a fiscal document."""
    _name = 'l10n_br_fiscal.document.cancel.wizard'
    _description = 'Fiscal Document Cancel Wizard'
    _inherit = 'l10n_br_fiscal.base.wizard.mixin'
    @api.multi
    def doit(self):
        """Cancel the document of each wizard record, then close the window.

        The user-supplied justification is formatted into a message and
        forwarded to the document via context for the cancellation call.
        """
        for wiz in self:
            if wiz.document_id:
                message = _("Cancellation: {}").format(wiz.justification)
                wiz.document_id.with_context(message=message)._document_cancel()
        return {'type': 'ir.actions.act_window_close'}
|
# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wizard in self:
if wizard.document_id:
wizard.document_id._document_cancel(wizard.justification)
self._close()
|
Update Fiscal document cancel wizard
|
[FIX] Update Fiscal document cancel wizard
|
Python
|
agpl-3.0
|
OCA/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil
|
# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wiz in self:
if wiz.document_id:
message = _("Cancellation: {}").format(wiz.justification)
wiz.document_id.with_context(message=message)._document_cancel()
return {'type': 'ir.actions.act_window_close'}
[FIX] Update Fiscal document cancel wizard
|
# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wizard in self:
if wizard.document_id:
wizard.document_id._document_cancel(wizard.justification)
self._close()
|
<commit_before># Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wiz in self:
if wiz.document_id:
message = _("Cancellation: {}").format(wiz.justification)
wiz.document_id.with_context(message=message)._document_cancel()
return {'type': 'ir.actions.act_window_close'}
<commit_msg>[FIX] Update Fiscal document cancel wizard<commit_after>
|
# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wizard in self:
if wizard.document_id:
wizard.document_id._document_cancel(wizard.justification)
self._close()
|
# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wiz in self:
if wiz.document_id:
message = _("Cancellation: {}").format(wiz.justification)
wiz.document_id.with_context(message=message)._document_cancel()
return {'type': 'ir.actions.act_window_close'}
[FIX] Update Fiscal document cancel wizard# Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wizard in self:
if wizard.document_id:
wizard.document_id._document_cancel(wizard.justification)
self._close()
|
<commit_before># Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wiz in self:
if wiz.document_id:
message = _("Cancellation: {}").format(wiz.justification)
wiz.document_id.with_context(message=message)._document_cancel()
return {'type': 'ir.actions.act_window_close'}
<commit_msg>[FIX] Update Fiscal document cancel wizard<commit_after># Copyright 2019 KMEE
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class DocumentCancelWizard(models.TransientModel):
_name = 'l10n_br_fiscal.document.cancel.wizard'
_description = 'Fiscal Document Cancel Wizard'
_inherit = 'l10n_br_fiscal.base.wizard.mixin'
@api.multi
def doit(self):
for wizard in self:
if wizard.document_id:
wizard.document_id._document_cancel(wizard.justification)
self._close()
|
cc8624cfa3788dc66e7afb144fc24ef5f1a79ff9
|
scripts/json-concat-lists.py
|
scripts/json-concat-lists.py
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
Remove duplicate JSON gaferences when concatenating
|
Remove duplicate JSON gaferences when concatenating
|
Python
|
bsd-3-clause
|
geneontology/go-site,geneontology/go-site,geneontology/go-site,geneontology/go-site,geneontology/go-site
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
Remove duplicate JSON gaferences when concatenating
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
<commit_before>import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
<commit_msg>Remove duplicate JSON gaferences when concatenating<commit_after>
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
Remove duplicate JSON gaferences when concatenatingimport json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
<commit_before>import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
list_all += json.load(in_f)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
<commit_msg>Remove duplicate JSON gaferences when concatenating<commit_after>import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('json_file', nargs='+')
parser.add_argument('output_file')
if __name__ == "__main__":
args = parser.parse_args()
list_all = []
for jf in args.json_file:
with open(jf) as in_f:
file_jsons = json.load(in_f)
for fj in file_jsons:
if fj not in list_all:
list_all.append(fj)
with open(args.output_file, 'w') as out_f:
json.dump(list_all, out_f)
|
44d8692b6739856170ba5d5b9712a2afe170f4df
|
presentation/tests.py
|
presentation/tests.py
|
from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test import RequestFactory
from presentation.models import Presentation
from presentation.views import PresentationList
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
presentation_list_url = reverse('presentation:list')
request = self.factory.get(presentation_list_url)
request.user = AnonymousUser()
response = PresentationList.as_view()(request)
self.assertIn(response, self.presentation.subject)
self.assertIn(response, self.presentation.author.name)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
|
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from presentation.models import Presentation
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.client = Client()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
response = self.client.get(reverse('presentation:list'))
self.assertContains(response, self.presentation.subject)
self.assertContains(response, self.presentation.author.username)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
|
Fix test case with client
|
Fix test case with client
|
Python
|
mit
|
SaturDJang/warp,SaturDJang/warp,SaturDJang/warp,SaturDJang/warp
|
from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test import RequestFactory
from presentation.models import Presentation
from presentation.views import PresentationList
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
presentation_list_url = reverse('presentation:list')
request = self.factory.get(presentation_list_url)
request.user = AnonymousUser()
response = PresentationList.as_view()(request)
self.assertIn(response, self.presentation.subject)
self.assertIn(response, self.presentation.author.name)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
Fix test case with client
|
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from presentation.models import Presentation
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.client = Client()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
response = self.client.get(reverse('presentation:list'))
self.assertContains(response, self.presentation.subject)
self.assertContains(response, self.presentation.author.username)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
|
<commit_before>from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test import RequestFactory
from presentation.models import Presentation
from presentation.views import PresentationList
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
presentation_list_url = reverse('presentation:list')
request = self.factory.get(presentation_list_url)
request.user = AnonymousUser()
response = PresentationList.as_view()(request)
self.assertIn(response, self.presentation.subject)
self.assertIn(response, self.presentation.author.name)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
<commit_msg>Fix test case with client<commit_after>
|
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from presentation.models import Presentation
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.client = Client()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
response = self.client.get(reverse('presentation:list'))
self.assertContains(response, self.presentation.subject)
self.assertContains(response, self.presentation.author.username)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
|
from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test import RequestFactory
from presentation.models import Presentation
from presentation.views import PresentationList
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
presentation_list_url = reverse('presentation:list')
request = self.factory.get(presentation_list_url)
request.user = AnonymousUser()
response = PresentationList.as_view()(request)
self.assertIn(response, self.presentation.subject)
self.assertIn(response, self.presentation.author.name)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
Fix test case with clientfrom django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from presentation.models import Presentation
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.client = Client()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
response = self.client.get(reverse('presentation:list'))
self.assertContains(response, self.presentation.subject)
self.assertContains(response, self.presentation.author.username)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
|
<commit_before>from django.contrib.auth.models import AnonymousUser
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test import RequestFactory
from presentation.models import Presentation
from presentation.views import PresentationList
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
presentation_list_url = reverse('presentation:list')
request = self.factory.get(presentation_list_url)
request.user = AnonymousUser()
response = PresentationList.as_view()(request)
self.assertIn(response, self.presentation.subject)
self.assertIn(response, self.presentation.author.name)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
<commit_msg>Fix test case with client<commit_after>from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from presentation.models import Presentation
from warp.users.models import User
class PresentationListTest(TestCase):
def setUp(self):
self.client = Client()
self.test_user = self.create_test_user()
self.presentation = Presentation(subject="subject",
author=self.test_user,
markdown="#Abcdefghijklmno",
html="<h1>Abcdefghijklmno</h1>")
for _ in range(0, 20):
self.presentation.save()
def test_get_presentation_list_page(self):
response = self.client.get(reverse('presentation:list'))
self.assertContains(response, self.presentation.subject)
self.assertContains(response, self.presentation.author.username)
@staticmethod
def create_test_user():
name = "Name name"
username = "username"
first_name = "Name"
last_name = "name"
email = "test@example.com"
is_staff = False
is_active = True
user = User(name=name,
username=username,
first_name=first_name,
last_name=last_name,
email=email,
is_staff=is_staff,
is_active=is_active)
user.save()
return user
|
1a389d686b2dfb1234dff240201e6436d920c131
|
posts/config.py
|
posts/config.py
|
class DevelopmentConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts-test"
DEBUG = True
|
class DevelopmentConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts-test"
DEBUG = True
|
Update database URIs for C9.
|
Update database URIs for C9.
|
Python
|
mit
|
j10sanders/crossword,j10sanders/crossword,j10sanders/crossword
|
class DevelopmentConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts-test"
DEBUG = True
Update database URIs for C9.
|
class DevelopmentConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts-test"
DEBUG = True
|
<commit_before>class DevelopmentConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts-test"
DEBUG = True
<commit_msg>Update database URIs for C9.<commit_after>
|
class DevelopmentConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts-test"
DEBUG = True
|
class DevelopmentConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts-test"
DEBUG = True
Update database URIs for C9.class DevelopmentConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts-test"
DEBUG = True
|
<commit_before>class DevelopmentConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://action:action@localhost:5432/posts-test"
DEBUG = True
<commit_msg>Update database URIs for C9.<commit_after>class DevelopmentConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts"
DEBUG = True
class TestingConfig(object):
DATABASE_URI = "postgresql://ubuntu:thinkful@localhost:5432/posts-test"
DEBUG = True
|
b307bb4e59598670cf2ee09c4107d9f42d8b7d3c
|
warehouse/database/mixins.py
|
warehouse/database/mixins.py
|
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql.expression import text
from warehouse import db
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
|
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import text
from warehouse import db
from warehouse.database.schema import TableDDL
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
class TimeStampedMixin(object):
__table_args__ = (
TableDDL("""
CREATE OR REPLACE FUNCTION update_modified_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified = now();
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER update_%(table)s_modtime
BEFORE UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE update_modified_column();
"""),
)
created = db.Column(db.DateTime, server_default=func.now(), nullable=False)
modified = db.Column(db.DateTime, server_default=func.now(), nullable=False)
|
Add a database mixin for Timestamping models
|
Add a database mixin for Timestamping models
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql.expression import text
from warehouse import db
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
Add a database mixin for Timestamping models
|
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import text
from warehouse import db
from warehouse.database.schema import TableDDL
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
class TimeStampedMixin(object):
__table_args__ = (
TableDDL("""
CREATE OR REPLACE FUNCTION update_modified_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified = now();
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER update_%(table)s_modtime
BEFORE UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE update_modified_column();
"""),
)
created = db.Column(db.DateTime, server_default=func.now(), nullable=False)
modified = db.Column(db.DateTime, server_default=func.now(), nullable=False)
|
<commit_before>from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql.expression import text
from warehouse import db
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
<commit_msg>Add a database mixin for Timestamping models<commit_after>
|
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import text
from warehouse import db
from warehouse.database.schema import TableDDL
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
class TimeStampedMixin(object):
__table_args__ = (
TableDDL("""
CREATE OR REPLACE FUNCTION update_modified_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified = now();
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER update_%(table)s_modtime
BEFORE UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE update_modified_column();
"""),
)
created = db.Column(db.DateTime, server_default=func.now(), nullable=False)
modified = db.Column(db.DateTime, server_default=func.now(), nullable=False)
|
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql.expression import text
from warehouse import db
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
Add a database mixin for Timestamping modelsfrom sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import text
from warehouse import db
from warehouse.database.schema import TableDDL
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
class TimeStampedMixin(object):
__table_args__ = (
TableDDL("""
CREATE OR REPLACE FUNCTION update_modified_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified = now();
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER update_%(table)s_modtime
BEFORE UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE update_modified_column();
"""),
)
created = db.Column(db.DateTime, server_default=func.now(), nullable=False)
modified = db.Column(db.DateTime, server_default=func.now(), nullable=False)
|
<commit_before>from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql.expression import text
from warehouse import db
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
<commit_msg>Add a database mixin for Timestamping models<commit_after>from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import text
from warehouse import db
from warehouse.database.schema import TableDDL
class UUIDPrimaryKeyMixin(object):
id = db.Column(pg.UUID(as_uuid=True),
primary_key=True, server_default=text("uuid_generate_v4()"))
class TimeStampedMixin(object):
__table_args__ = (
TableDDL("""
CREATE OR REPLACE FUNCTION update_modified_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.modified = now();
RETURN NEW;
END;
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER update_%(table)s_modtime
BEFORE UPDATE
ON %(table)s
FOR EACH ROW
EXECUTE PROCEDURE update_modified_column();
"""),
)
created = db.Column(db.DateTime, server_default=func.now(), nullable=False)
modified = db.Column(db.DateTime, server_default=func.now(), nullable=False)
|
22823ca55e4c342149b83d84d18ad879d55023d7
|
oslib/__init__.py
|
oslib/__init__.py
|
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
|
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
Add find import to oslib init
|
Add find import to oslib init
|
Python
|
mit
|
Caian/ostools
|
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
Add find import to oslib init
|
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
<commit_before>#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
<commit_msg>Add find import to oslib init<commit_after>
|
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
Add find import to oslib init#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
<commit_before>#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
<commit_msg>Add find import to oslib init<commit_after>#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2017 Caian Benedicto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from .state import OSState
from .user import OSUser
from .find import findff, findfr, findrf, findrr, superfind
|
c37eccdf135fbbcb4d153f3e94c916cd0e9361ea
|
imagersite/imagersite/urls.py
|
imagersite/imagersite/urls.py
|
"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
Add accounts url for registration backend
|
Add accounts url for registration backend
|
Python
|
mit
|
jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager
|
"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
Add accounts url for registration backend
|
"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
<commit_before>"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
<commit_msg>Add accounts url for registration backend<commit_after>
|
"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
Add accounts url for registration backend"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
<commit_before>"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
<commit_msg>Add accounts url for registration backend<commit_after>"""imagersite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('registration.backends.default.urls')),
]
if settings.DEBUG:
urlpatterns += static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
|
6e89594c231698d4f20590e723a876699876fb52
|
utils/__init__.py
|
utils/__init__.py
|
from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
# pylint: disable=unused-import
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
Make Pylint ignore some unused imports
|
Make Pylint ignore some unused imports
Ignore them because they are required for the bot to work
|
Python
|
mit
|
wolfy1339/Python-IRC-Bot
|
from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
Make Pylint ignore some unused imports
Ignore them because they are required for the bot to work
|
# pylint: disable=unused-import
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
<commit_before>from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
<commit_msg>Make Pylint ignore some unused imports
Ignore them because they are required for the bot to work<commit_after>
|
# pylint: disable=unused-import
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
Make Pylint ignore some unused imports
Ignore them because they are required for the bot to work# pylint: disable=unused-import
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
<commit_before>from . import util
from . import irc
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
<commit_msg>Make Pylint ignore some unused imports
Ignore them because they are required for the bot to work<commit_after># pylint: disable=unused-import
from . import util
from . import irc
# pylint: enable=unused-import
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
|
5c9a121d9ab8b66c49d7a58d24805c1eecfc8a71
|
ce/common.py
|
ce/common.py
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __eq__(self, other):
return not self < other and not other < self
def __ne__(self, other):
return self < other or other < self
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return other < self
def __le__(self, other):
return not other < self
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __ne__(self, other):
return not self.__eq__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __gt__(self, other):
return not self.__eq__(other) and not self.__lt__(other)
def __le__(self, other):
return not self.__gt__(other)
|
Fix infinite recursion of comparing Comparable
|
Fix infinite recursion of comparing Comparable
|
Python
|
mit
|
admk/soap
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __eq__(self, other):
return not self < other and not other < self
def __ne__(self, other):
return self < other or other < self
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return other < self
def __le__(self, other):
return not other < self
Fix infinite recursion of comparing Comparable
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __ne__(self, other):
return not self.__eq__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __gt__(self, other):
return not self.__eq__(other) and not self.__lt__(other)
def __le__(self, other):
return not self.__gt__(other)
|
<commit_before>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __eq__(self, other):
return not self < other and not other < self
def __ne__(self, other):
return self < other or other < self
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return other < self
def __le__(self, other):
return not other < self
<commit_msg>Fix infinite recursion of comparing Comparable<commit_after>
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __ne__(self, other):
return not self.__eq__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __gt__(self, other):
return not self.__eq__(other) and not self.__lt__(other)
def __le__(self, other):
return not self.__gt__(other)
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __eq__(self, other):
return not self < other and not other < self
def __ne__(self, other):
return self < other or other < self
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return other < self
def __le__(self, other):
return not other < self
Fix infinite recursion of comparing Comparable#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __ne__(self, other):
return not self.__eq__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __gt__(self, other):
return not self.__eq__(other) and not self.__lt__(other)
def __le__(self, other):
return not self.__gt__(other)
|
<commit_before>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __eq__(self, other):
return not self < other and not other < self
def __ne__(self, other):
return self < other or other < self
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return other < self
def __le__(self, other):
return not other < self
<commit_msg>Fix infinite recursion of comparing Comparable<commit_after>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import inspect
class DynamicMethods(object):
def list_methods(self, predicate):
"""Find all transform methods within the class that satisfies the
predicate.
Returns:
A list of tuples containing method names and corresponding methods
that can be called with a tree as the argument for each method.
"""
methods = [member[0] for member in inspect.getmembers(
self.__class__, predicate=inspect.ismethod)]
return [getattr(self, method) for method in methods
if not method.startswith('_') and method != 'list_methods' and
predicate(method)]
class Comparable(object):
def __ne__(self, other):
return not self.__eq__(other)
def __ge__(self, other):
return not self.__lt__(other)
def __gt__(self, other):
return not self.__eq__(other) and not self.__lt__(other)
def __le__(self, other):
return not self.__gt__(other)
|
a49e697e45a0c7a678a852f0c9215bd7a3fa24bf
|
tests/backends/test_macOS.py
|
tests/backends/test_macOS.py
|
import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
|
import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
@pytest.mark.xfail
def test_alternate_keychain(self):
alt = self.keyring.with_keychain('abcd')
assert alt.keychain == 'abcd'
assert self.keyring.keychain is None
|
Add test capturing need for an alternate keyring with an alternate keychain.
|
Add test capturing need for an alternate keyring with an alternate keychain.
|
Python
|
mit
|
jaraco/keyring
|
import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
Add test capturing need for an alternate keyring with an alternate keychain.
|
import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
@pytest.mark.xfail
def test_alternate_keychain(self):
alt = self.keyring.with_keychain('abcd')
assert alt.keychain == 'abcd'
assert self.keyring.keychain is None
|
<commit_before>import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
<commit_msg>Add test capturing need for an alternate keyring with an alternate keychain.<commit_after>
|
import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
@pytest.mark.xfail
def test_alternate_keychain(self):
alt = self.keyring.with_keychain('abcd')
assert alt.keychain == 'abcd'
assert self.keyring.keychain is None
|
import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
Add test capturing need for an alternate keyring with an alternate keychain.import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
@pytest.mark.xfail
def test_alternate_keychain(self):
alt = self.keyring.with_keychain('abcd')
assert alt.keychain == 'abcd'
assert self.keyring.keychain is None
|
<commit_before>import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
<commit_msg>Add test capturing need for an alternate keyring with an alternate keychain.<commit_after>import pytest
import keyring
from keyring.testing.backend import BackendBasicTests
from keyring.backends import macOS
@pytest.mark.skipif(
not keyring.backends.macOS.Keyring.viable,
reason="macOS backend not viable",
)
class Test_macOSKeychain(BackendBasicTests):
def init_keyring(self):
return macOS.Keyring()
@pytest.mark.xfail
def test_alternate_keychain(self):
alt = self.keyring.with_keychain('abcd')
assert alt.keychain == 'abcd'
assert self.keyring.keychain is None
|
9d35218506368702ac33d78be197ee3151d24ed9
|
ledger_type.py
|
ledger_type.py
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_order = 'name'
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
|
Order by ledger types by name
|
Order by ledger types by name
|
Python
|
agpl-3.0
|
xcgd/alternate_ledger,xcgd/alternate_ledger
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
Order by ledger types by name
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_order = 'name'
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
|
<commit_before>from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
<commit_msg>Order by ledger types by name<commit_after>
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_order = 'name'
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
Order by ledger types by namefrom openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_order = 'name'
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
|
<commit_before>from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
<commit_msg>Order by ledger types by name<commit_after>from openerp.osv import fields, osv
from openerp.tools.translate import _
_enum_ledger_type = [
('ledger_a', _('Ledger A')),
('ledger_b', _('Ledger B')),
('ledger_c', _('Ledger C')),
('ledger_d', _('Ledger D')),
('ledger_e', _('Ledger E')),
]
class ledger_type(osv.Model):
_name = 'alternate_ledger.ledger_type'
_columns = {
'name': fields.char(
_('Name'), size=256, required=True),
'type': fields.selection(
_enum_ledger_type, _('Ledger Type'), required=True),
}
_order = 'name'
_sql_constraint = [
('name', "UNIQUE('name')", 'Name has to be unique !'),
]
|
e5eaf68490098cb89cf9d6ad8b4eaa96bafd0450
|
compose/cli/docker_client.py
|
compose/cli/docker_client.py
|
import logging
import os
import ssl
from docker import Client
from docker import tls
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.19')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT)
|
import logging
import os
from docker import Client
from docker.utils import kwargs_from_env
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
kwargs = kwargs_from_env(assert_hostname=False)
kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19')
kwargs['timeout'] = HTTP_TIMEOUT
return Client(**kwargs)
|
Remove custom docker client initialization logic
|
Remove custom docker client initialization logic
Signed-off-by: Aanand Prasad <94fc4f3e3d0be608b3ed7b8529ff28d2a445cce1@gmail.com>
|
Python
|
apache-2.0
|
phiroict/docker,denverdino/docker.github.io,phiroict/docker,denverdino/docker.github.io,jzwlqx/denverdino.github.io,docker/docker.github.io,shin-/docker.github.io,swoopla/compose,rillig/docker.github.io,jiekechoo/compose,bdwill/docker.github.io,joeuo/docker.github.io,sanscontext/docker.github.io,amitsaha/compose,GM-Alex/compose,mohitsoni/compose,charleswhchan/compose,troy0820/docker.github.io,thaJeztah/compose,kikkomep/compose,KevinGreene/compose,thaJeztah/docker.github.io,phiroict/docker,joaofnfernandes/docker.github.io,goloveychuk/compose,docker-zh/docker.github.io,sdurrheimer/compose,talolard/compose,alexisbellido/docker.github.io,londoncalling/docker.github.io,denverdino/docker.github.io,londoncalling/docker.github.io,mnowster/compose,londoncalling/docker.github.io,dbdd4us/compose,goloveychuk/compose,johnstep/docker.github.io,sanscontext/docker.github.io,swoopla/compose,rillig/docker.github.io,j-fuentes/compose,twitherspoon/compose,menglingwei/denverdino.github.io,danix800/docker.github.io,shubheksha/docker.github.io,viranch/compose,TomasTomecek/compose,bdwill/docker.github.io,denverdino/denverdino.github.io,jzwlqx/denverdino.github.io,shubheksha/docker.github.io,j-fuentes/compose,albers/compose,viranch/compose,funkyfuture/docker-compose,londoncalling/docker.github.io,joeuo/docker.github.io,johnstep/docker.github.io,docker-zh/docker.github.io,bdwill/docker.github.io,au-phiware/compose,jiekechoo/compose,moxiegirl/compose,phiroict/docker,docker/docker.github.io,denverdino/denverdino.github.io,jeanpralo/compose,kojiromike/compose,tiry/compose,aduermael/docker.github.io,albers/compose,twitherspoon/compose,denverdino/compose,mdaue/compose,kikkomep/compose,TomasTomecek/compose,denverdino/denverdino.github.io,johnstep/docker.github.io,londoncalling/docker.github.io,mrfuxi/compose,alexandrev/compose,hoogenm/compose,troy0820/docker.github.io,jeanpralo/compose,joaofnfernandes/docker.github.io,schmunk42/compose,LuisBosquez/docker.github.io,alexisbellido/docker.github.io,an
drewgee/compose,vdemeester/compose,joaofnfernandes/docker.github.io,shin-/compose,jorgeLuizChaves/compose,shubheksha/docker.github.io,BSWANG/denverdino.github.io,KalleDK/compose,menglingwei/denverdino.github.io,jonaseck2/compose,shin-/docker.github.io,anweiss/docker.github.io,jrabbit/compose,dnephin/compose,mnowster/compose,amitsaha/compose,shin-/docker.github.io,docker-zh/docker.github.io,LuisBosquez/docker.github.io,joaofnfernandes/docker.github.io,BSWANG/denverdino.github.io,bdwill/docker.github.io,au-phiware/compose,vdemeester/compose,anweiss/docker.github.io,mohitsoni/compose,aduermael/docker.github.io,aduermael/docker.github.io,denverdino/docker.github.io,alexisbellido/docker.github.io,tiry/compose,anweiss/docker.github.io,shubheksha/docker.github.io,hoogenm/compose,JimGalasyn/docker.github.io,jorgeLuizChaves/compose,michael-k/docker-compose,funkyfuture/docker-compose,ChrisChinchilla/compose,talolard/compose,sdurrheimer/compose,thaJeztah/docker.github.io,alexisbellido/docker.github.io,kojiromike/compose,joeuo/docker.github.io,KevinGreene/compose,thaJeztah/docker.github.io,joeuo/docker.github.io,thaJeztah/compose,alexandrev/compose,troy0820/docker.github.io,docker/docker.github.io,danix800/docker.github.io,LuisBosquez/docker.github.io,GM-Alex/compose,sanscontext/docker.github.io,troy0820/docker.github.io,sanscontext/docker.github.io,shubheksha/docker.github.io,docker-zh/docker.github.io,rillig/docker.github.io,JimGalasyn/docker.github.io,schmunk42/compose,jzwlqx/denverdino.github.io,moxiegirl/compose,phiroict/docker,BSWANG/denverdino.github.io,rgbkrk/compose,dbdd4us/compose,thaJeztah/docker.github.io,michael-k/docker-compose,KalleDK/compose,jzwlqx/denverdino.github.io,johnstep/docker.github.io,gdevillele/docker.github.io,BSWANG/denverdino.github.io,ChrisChinchilla/compose,JimGalasyn/docker.github.io,gdevillele/docker.github.io,danix800/docker.github.io,gdevillele/docker.github.io,mrfuxi/compose,shin-/docker.github.io,aduermael/docker.github.io,docker/docker.git
hub.io,sanscontext/docker.github.io,docker-zh/docker.github.io,danix800/docker.github.io,JimGalasyn/docker.github.io,denverdino/compose,dnephin/compose,shin-/docker.github.io,rillig/docker.github.io,gdevillele/docker.github.io,rgbkrk/compose,LuisBosquez/docker.github.io,anweiss/docker.github.io,LuisBosquez/docker.github.io,bdwill/docker.github.io,JimGalasyn/docker.github.io,denverdino/denverdino.github.io,jzwlqx/denverdino.github.io,menglingwei/denverdino.github.io,denverdino/docker.github.io,johnstep/docker.github.io,docker/docker.github.io,menglingwei/denverdino.github.io,mdaue/compose,jonaseck2/compose,gdevillele/docker.github.io,anweiss/docker.github.io,BSWANG/denverdino.github.io,menglingwei/denverdino.github.io,thaJeztah/docker.github.io,shin-/compose,joaofnfernandes/docker.github.io,alexisbellido/docker.github.io,charleswhchan/compose,andrewgee/compose,denverdino/denverdino.github.io,jrabbit/compose,joeuo/docker.github.io
|
import logging
import os
import ssl
from docker import Client
from docker import tls
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.19')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT)
Remove custom docker client initialization logic
Signed-off-by: Aanand Prasad <94fc4f3e3d0be608b3ed7b8529ff28d2a445cce1@gmail.com>
|
import logging
import os
from docker import Client
from docker.utils import kwargs_from_env
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
kwargs = kwargs_from_env(assert_hostname=False)
kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19')
kwargs['timeout'] = HTTP_TIMEOUT
return Client(**kwargs)
|
<commit_before>import logging
import os
import ssl
from docker import Client
from docker import tls
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.19')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT)
<commit_msg>Remove custom docker client initialization logic
Signed-off-by: Aanand Prasad <94fc4f3e3d0be608b3ed7b8529ff28d2a445cce1@gmail.com><commit_after>
|
import logging
import os
from docker import Client
from docker.utils import kwargs_from_env
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
kwargs = kwargs_from_env(assert_hostname=False)
kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19')
kwargs['timeout'] = HTTP_TIMEOUT
return Client(**kwargs)
|
import logging
import os
import ssl
from docker import Client
from docker import tls
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.19')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT)
Remove custom docker client initialization logic
Signed-off-by: Aanand Prasad <94fc4f3e3d0be608b3ed7b8529ff28d2a445cce1@gmail.com>import logging
import os
from docker import Client
from docker.utils import kwargs_from_env
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
kwargs = kwargs_from_env(assert_hostname=False)
kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19')
kwargs['timeout'] = HTTP_TIMEOUT
return Client(**kwargs)
|
<commit_before>import logging
import os
import ssl
from docker import Client
from docker import tls
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.19')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT)
<commit_msg>Remove custom docker client initialization logic
Signed-off-by: Aanand Prasad <94fc4f3e3d0be608b3ed7b8529ff28d2a445cce1@gmail.com><commit_after>import logging
import os
from docker import Client
from docker.utils import kwargs_from_env
from ..const import HTTP_TIMEOUT
log = logging.getLogger(__name__)
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
if 'DOCKER_CLIENT_TIMEOUT' in os.environ:
log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.')
kwargs = kwargs_from_env(assert_hostname=False)
kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19')
kwargs['timeout'] = HTTP_TIMEOUT
return Client(**kwargs)
|
03b4575a60cab53629c62eca5df5acdd9688fbbb
|
project/views/twilioviews.py
|
project/views/twilioviews.py
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
Add print debug to text function
|
Add print debug to text function
|
Python
|
apache-2.0
|
tjcsl/mhacksiv
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
Add print debug to text function
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
<commit_before>from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
<commit_msg>Add print debug to text function<commit_after>
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
Add print debug to text functionfrom flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
<commit_before>from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
<commit_msg>Add print debug to text function<commit_after>from flask import request
import requests
from ..utils.status import get_status
from ..utils.reminders import create_reminder
import twilio.twiml
import json
from datetime import datetime
def call():
resp = twilio.twiml.Response()
resp.record(timeout=10, transcribe=True,
transcribeCallback='http://queri.me/rec', )
return str(resp)
def text():
b = request.form.get('Body','')
phone = request.form.get('From','')
wit = requests.get('https://api.wit.ai/message?v=20140905&q=%s' % b, headers={'Authorization':'Bearer L3VB34V6YTDFO4BRXNDQNAYMVOOF4BHB'}).text
intent = json.loads(wit)['outcomes'][0]['intent']
print json.loads(wit)
if intent == 'get_status':
m = get_status(wit, phone)
elif intent == 'remind':
entities = json.loads(wit)['outcomes'][0]['entities']
date = datetime.strptime(entities['time'][0]['value']['from'],"%Y-%m-%dT%H:%M:%S.Z")
text = entities['message']
m = create_reminder(date, text, phone)
else:
m = "Hmm? Try again please :("
# Send to wit.ai for processing
resp = twilio.twiml.Response()
resp.message(m)
return str(resp)
def rec():
print request.form.get('TranscriptionText','')
return ''
|
ee17ff42931e718d77ac2180b23e750bedcd31d4
|
test/test_searchentities.py
|
test/test_searchentities.py
|
import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
|
import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
Add a test for wscompat conversion
|
Add a test for wscompat conversion
|
Python
|
mit
|
jeffweeksio/sir
|
import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
Add a test for wscompat conversion
|
import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
<commit_before>import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
<commit_msg>Add a test for wscompat conversion<commit_after>
|
import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
Add a test for wscompat conversionimport mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
<commit_before>import unittest
from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
<commit_msg>Add a test for wscompat conversion<commit_after>import mock
import unittest
from . import models
from xml.etree.ElementTree import Element, tostring
from sir.schema.searchentities import SearchEntity as E, SearchField as F
class QueryResultToDictTest(unittest.TestCase):
def setUp(self):
self.entity = E(models.B, [
F("id", "id"),
F("c_bar", "c.bar"),
F("c_bar_trans", "c.bar", transformfunc=lambda v:
v.union(set(["yay"])))
],
1.1
)
self.expected = {
"id": 1,
"c_bar": "foo",
"c_bar_trans": set(["foo", "yay"]),
}
c = models.C(id=2, bar="foo")
self.val = models.B(id=1, c=c)
def test_fields(self):
res = self.entity.query_result_to_dict(self.val)
self.assertDictEqual(self.expected, res)
def test_conversion(self):
elem = Element("testelem", text="text")
convmock = mock.Mock()
convmock.to_etree.return_value = elem
self.entity.compatconverter = lambda x: convmock
res = self.entity.query_result_to_dict(self.val)
self.expected["_store"] = tostring(elem)
self.assertDictEqual(self.expected, res)
self.assertEqual(convmock.to_etree.call_count, 1)
|
6637d13202e647058296e0a8a5606bf64598b63e
|
Lib/misc/setup.py
|
Lib/misc/setup.py
|
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
print "########", config
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
|
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
|
Remove extra noise on install.
|
Remove extra noise on install.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1549 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
Python
|
bsd-3-clause
|
scipy/scipy-svn,jasonmccampbell/scipy-refactor,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,lesserwhirls/scipy-cwt,scipy/scipy-svn
|
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
print "########", config
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
Remove extra noise on install.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1549 d6536bca-fef9-0310-8506-e4c0a848fbcf
|
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
|
<commit_before>
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
print "########", config
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
<commit_msg>Remove extra noise on install.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1549 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>
|
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
|
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
print "########", config
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
Remove extra noise on install.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1549 d6536bca-fef9-0310-8506-e4c0a848fbcf
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
|
<commit_before>
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
print "########", config
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
<commit_msg>Remove extra noise on install.
git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@1549 d6536bca-fef9-0310-8506-e4c0a848fbcf<commit_after>
import os
from numpy.distutils.misc_util import Configuration
def configuration(parent_package='',top_path=None):
config = Configuration('misc',parent_package, top_path)
config.add_data_files('lena.dat')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration())
|
ccf3bcfc962a37d088507b542bd8e3af2ce515b6
|
tests/test_with_testcase.py
|
tests/test_with_testcase.py
|
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
with self.benchmark_weave('time.sleep'):
time.sleep(0.0000001)
|
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
self.benchmark_weave('time.sleep')
time.sleep(0.0000001)
|
Remove use of context manager.
|
Remove use of context manager.
|
Python
|
bsd-2-clause
|
thedrow/pytest-benchmark,aldanor/pytest-benchmark,SectorLabs/pytest-benchmark,ionelmc/pytest-benchmark
|
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
with self.benchmark_weave('time.sleep'):
time.sleep(0.0000001)
Remove use of context manager.
|
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
self.benchmark_weave('time.sleep')
time.sleep(0.0000001)
|
<commit_before>import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
with self.benchmark_weave('time.sleep'):
time.sleep(0.0000001)
<commit_msg>Remove use of context manager.<commit_after>
|
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
self.benchmark_weave('time.sleep')
time.sleep(0.0000001)
|
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
with self.benchmark_weave('time.sleep'):
time.sleep(0.0000001)
Remove use of context manager.import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
self.benchmark_weave('time.sleep')
time.sleep(0.0000001)
|
<commit_before>import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
with self.benchmark_weave('time.sleep'):
time.sleep(0.0000001)
<commit_msg>Remove use of context manager.<commit_after>import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
self.benchmark_weave('time.sleep')
time.sleep(0.0000001)
|
8a9a0f1dc277d26767ffc3f34c00c18d00bd5e2e
|
conanfile.py
|
conanfile.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
Fix missing generator on Windows
|
Fix missing generator on Windows
- Since compiler is not listed on settings, CMake is not
able to detect a valid generator, using MingGW by default
- Add topics to be used as tags for searching
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
|
Python
|
mit
|
ColinH/PEGTL,ColinH/PEGTL
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
Fix missing generator on Windows
- Since compiler is not listed on settings, CMake is not
able to detect a valid generator, using MingGW by default
- Add topics to be used as tags for searching
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
<commit_msg>Fix missing generator on Windows
- Since compiler is not listed on settings, CMake is not
able to detect a valid generator, using MingGW by default
- Add topics to be used as tags for searching
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
Fix missing generator on Windows
- Since compiler is not listed on settings, CMake is not
able to detect a valid generator, using MingGW by default
- Add topics to be used as tags for searching
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
<commit_msg>Fix missing generator on Windows
- Since compiler is not listed on settings, CMake is not
able to detect a valid generator, using MingGW by default
- Add topics to be used as tags for searching
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "taocpp@icemx.net"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
153688b63103a024b126a7c92eb9d0816500d2dc
|
ircstat/ent.py
|
ircstat/ent.py
|
# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
|
# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
|
Update Struct.__repr__ to show subclass names
|
Update Struct.__repr__ to show subclass names
|
Python
|
mit
|
jreese/ircstat,jreese/ircstat
|
# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
Update Struct.__repr__ to show subclass names
|
# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
|
<commit_before># Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
<commit_msg>Update Struct.__repr__ to show subclass names<commit_after>
|
# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
    """Lightweight attribute bag.

    Key/value pairs from an optional mapping and/or keyword arguments
    become attributes on the instance.
    """

    def __init__(self, data=None, **kwargs):
        # Collect mapping entries first, then keyword overrides, and
        # install everything as instance attributes in one pass.
        attrs = dict(data) if data is not None else {}
        attrs.update(kwargs)
        self.__dict__.update(attrs)

    def __repr__(self):
        # Use the runtime class name so subclasses render as themselves.
        return '<{} {}>'.format(type(self).__name__, self.__dict__)
# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
Update Struct.__repr__ to show subclass names# Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
|
<commit_before># Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
<commit_msg>Update Struct.__repr__ to show subclass names<commit_after># Copyright 2013 John Reese
# Licensed under the MIT license
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
|
4a17579478bce6e4c51fc0f6506b090cfea0dfb5
|
pyjsonts/time_series_json.py
|
pyjsonts/time_series_json.py
|
import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
|
import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
|
Make whitespaces before TimeSeriesJSON for PEP8
|
Make whitespaces before TimeSeriesJSON for PEP8
|
Python
|
apache-2.0
|
jeongmincha/pyjsonts
|
import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
Make whitespaces before TimeSeriesJSON for PEP8
|
import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
|
<commit_before>import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
<commit_msg>Make whitespaces before TimeSeriesJSON for PEP8<commit_after>
|
import json
import ijson
class TimeSeriesJSON:
    """Wraps a JSON file and eagerly parses its items into a list of
    pretty-printed JSON strings (see ``parse_json_items``)."""

    def __init__(self, f=None, fn=None, tag='item'):
        """
        :param f: file object (_io.TextIOWrapper)
        :param fn: file name as a string
        :param tag: tag for dividing json items
        default value is 'item' because this value is default in ijson
        """
        # Exactly one of `f` / `fn` is expected; `f` wins when both are given.
        # NOTE(review): if neither is given, self.__file is never assigned and
        # parse_json_items below raises AttributeError -- confirm intended.
        if f is not None:
            self.__type = 'file'
            self.__file = f
        elif fn is not None:
            self.__type = 'file_name'
            self.__file_name = fn
            self.__file = open(fn)  # never closed here -- caller's responsibility
        # Parsing happens eagerly at construction time.
        self.__items = self.parse_json_items(tag)

    def parse_json_items(self, tag, limit=0):
        """Re-read the file and return its items as JSON strings.

        :param tag: ijson prefix selecting which items to extract
        :param limit: stop after this many items; 0 means no limit
        :return: list of pretty-printed, key-sorted JSON strings
        """
        self.__items = []
        self.__file.seek(0)  # rewind so repeated parses see the whole file
        cnt = 0
        objs = ijson.items(self.__file, tag)
        for obj in objs:
            # Normalize each item to a stable textual form (sorted keys,
            # fixed indent) so results are comparable across runs.
            item = json.dumps(obj, \
                sort_keys=True, \
                indent=4, \
                ensure_ascii=True)
            self.__items.append(item)
            cnt += 1
            if limit != 0 and cnt >= limit:
                break
        return self.__items
|
import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
Make whitespaces before TimeSeriesJSON for PEP8import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
|
<commit_before>import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
<commit_msg>Make whitespaces before TimeSeriesJSON for PEP8<commit_after>import json
import ijson
class TimeSeriesJSON:
def __init__(self, f=None, fn=None, tag='item'):
"""
:param f: file object (_io.TextIOWrapper)
:param fn: file name as a string
:param tag: tag for dividing json items
default value is 'item' because this value is default in ijson
"""
if f is not None:
self.__type = 'file'
self.__file = f
elif fn is not None:
self.__type = 'file_name'
self.__file_name = fn
self.__file = open(fn)
self.__items = self.parse_json_items(tag)
def parse_json_items(self, tag, limit=0):
self.__items = []
self.__file.seek(0)
cnt = 0
objs = ijson.items(self.__file, tag)
for obj in objs:
item = json.dumps(obj, \
sort_keys=True, \
indent=4, \
ensure_ascii=True)
self.__items.append(item)
cnt += 1
if limit != 0 and cnt >= limit:
break
return self.__items
|
497313620772c1cb0d520be1a0024c12ca02742e
|
tests/python_tests/fontset_test.py
|
tests/python_tests/fontset_test.py
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
def test_loading_fontset_from_python():
m = mapnik.Map(256,256)
fset = mapnik.FontSet('my-set')
fset.add_face_name('Comic Sans')
fset.add_face_name('Papyrus')
m.append_fontset('my-set', fset)
sty = mapnik.Style()
rule = mapnik.Rule()
tsym = mapnik.TextSymbolizer()
tsym.fontset = fset
rule.symbols.append(tsym)
sty.rules.append(rule)
m.append_style('Style',sty)
serialized_map = mapnik.save_map_to_string(m)
eq_('fontset-name="my-set"' in serialized_map,True)
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
|
Add a test (currently failing) ensuring that named fontsets created in python are properly serialized
|
Add a test (currently failing) ensuring that named fontsets created in python are properly serialized
|
Python
|
lgpl-2.1
|
Mappy/mapnik,qianwenming/mapnik,tomhughes/mapnik,jwomeara/mapnik,pnorman/mapnik,davenquinn/python-mapnik,yiqingj/work,pnorman/mapnik,Mappy/mapnik,yohanboniface/python-mapnik,mapycz/python-mapnik,jwomeara/mapnik,Mappy/mapnik,yiqingj/work,strk/mapnik,kapouer/mapnik,Mappy/mapnik,qianwenming/mapnik,lightmare/mapnik,garnertb/python-mapnik,strk/mapnik,cjmayo/mapnik,lightmare/mapnik,Uli1/mapnik,mapycz/python-mapnik,zerebubuth/mapnik,manz/python-mapnik,CartoDB/mapnik,rouault/mapnik,stefanklug/mapnik,garnertb/python-mapnik,yohanboniface/python-mapnik,cjmayo/mapnik,manz/python-mapnik,pramsey/mapnik,tomhughes/mapnik,mapnik/mapnik,kapouer/mapnik,qianwenming/mapnik,yiqingj/work,mapnik/python-mapnik,garnertb/python-mapnik,rouault/mapnik,pnorman/mapnik,naturalatlas/mapnik,Uli1/mapnik,tomhughes/mapnik,strk/mapnik,manz/python-mapnik,mbrukman/mapnik,davenquinn/python-mapnik,pramsey/mapnik,whuaegeanse/mapnik,Airphrame/mapnik,tomhughes/python-mapnik,tomhughes/python-mapnik,tomhughes/python-mapnik,lightmare/mapnik,zerebubuth/mapnik,zerebubuth/mapnik,Airphrame/mapnik,mbrukman/mapnik,CartoDB/mapnik,mapycz/mapnik,sebastic/python-mapnik,rouault/mapnik,kapouer/mapnik,Uli1/mapnik,mbrukman/mapnik,mapnik/mapnik,stefanklug/mapnik,mapnik/python-mapnik,lightmare/mapnik,Uli1/mapnik,kapouer/mapnik,jwomeara/mapnik,whuaegeanse/mapnik,CartoDB/mapnik,mapycz/mapnik,qianwenming/mapnik,davenquinn/python-mapnik,whuaegeanse/mapnik,yiqingj/work,strk/mapnik,pnorman/mapnik,rouault/mapnik,mapnik/python-mapnik,mbrukman/mapnik,pramsey/mapnik,naturalatlas/mapnik,cjmayo/mapnik,naturalatlas/mapnik,yohanboniface/python-mapnik,sebastic/python-mapnik,tomhughes/mapnik,cjmayo/mapnik,stefanklug/mapnik,jwomeara/mapnik,mapnik/mapnik,qianwenming/mapnik,whuaegeanse/mapnik,Airphrame/mapnik,mapnik/mapnik,Airphrame/mapnik,mapycz/mapnik,pramsey/mapnik,sebastic/python-mapnik,stefanklug/mapnik,naturalatlas/mapnik
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
def test_loading_fontset_from_python():
m = mapnik.Map(256,256)
fset = mapnik.FontSet('my-set')
fset.add_face_name('Comic Sans')
fset.add_face_name('Papyrus')
m.append_fontset('my-set', fset)
sty = mapnik.Style()
rule = mapnik.Rule()
tsym = mapnik.TextSymbolizer()
tsym.fontset = fset
rule.symbols.append(tsym)
sty.rules.append(rule)
m.append_style('Style',sty)
serialized_map = mapnik.save_map_to_string(m)
eq_('fontset-name="my-set"' in serialized_map,True)
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
|
<commit_before>#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
<commit_msg>Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized<commit_after>
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    """A fontset declared in map XML must be retrievable by name after load."""
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    # The XML fixture declares exactly these two face names, in this order.
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

def test_loading_fontset_from_python():
    """A named fontset built via the Python API must survive serialization."""
    m = mapnik.Map(256,256)
    fset = mapnik.FontSet('my-set')
    fset.add_face_name('Comic Sans')
    fset.add_face_name('Papyrus')
    m.append_fontset('my-set', fset)
    # Attach the fontset to a text symbolizer inside a style so the
    # serializer has a reason to reference it.
    sty = mapnik.Style()
    rule = mapnik.Rule()
    tsym = mapnik.TextSymbolizer()
    tsym.fontset = fset
    rule.symbols.append(tsym)
    sty.rules.append(rule)
    m.append_style('Style',sty)
    serialized_map = mapnik.save_map_to_string(m)
    # The named fontset reference must appear in the serialized XML.
    eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    # Run every module-level callable whose name contains 'test_'.
    [eval(run)() for run in dir() if 'test_' in run]
|
#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
def test_loading_fontset_from_python():
m = mapnik.Map(256,256)
fset = mapnik.FontSet('my-set')
fset.add_face_name('Comic Sans')
fset.add_face_name('Papyrus')
m.append_fontset('my-set', fset)
sty = mapnik.Style()
rule = mapnik.Rule()
tsym = mapnik.TextSymbolizer()
tsym.fontset = fset
rule.symbols.append(tsym)
sty.rules.append(rule)
m.append_style('Style',sty)
serialized_map = mapnik.save_map_to_string(m)
eq_('fontset-name="my-set"' in serialized_map,True)
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
|
<commit_before>#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
<commit_msg>Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized<commit_after>#!/usr/bin/env python
from nose.tools import *
from utilities import execution_path
import os, mapnik
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test_loading_fontset_from_map():
m = mapnik.Map(256,256)
mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
fs = m.find_fontset('book-fonts')
eq_(len(fs.names),2)
eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])
def test_loading_fontset_from_python():
m = mapnik.Map(256,256)
fset = mapnik.FontSet('my-set')
fset.add_face_name('Comic Sans')
fset.add_face_name('Papyrus')
m.append_fontset('my-set', fset)
sty = mapnik.Style()
rule = mapnik.Rule()
tsym = mapnik.TextSymbolizer()
tsym.fontset = fset
rule.symbols.append(tsym)
sty.rules.append(rule)
m.append_style('Style',sty)
serialized_map = mapnik.save_map_to_string(m)
eq_('fontset-name="my-set"' in serialized_map,True)
if __name__ == "__main__":
setup()
[eval(run)() for run in dir() if 'test_' in run]
|
0141340d2abddc954ea4388fe31629d98189632c
|
tests/test_exceptions.py
|
tests/test_exceptions.py
|
# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
|
# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
def test_stores_dictionaries(self):
messages = {'user': {'email': ['email is invalid']}}
err = ValidationError(messages)
assert err.messages == messages
|
Add test for storing dictionaries on ValidationError
|
Add test for storing dictionaries on ValidationError
|
Python
|
mit
|
0xDCA/marshmallow,Bachmann1234/marshmallow,xLegoz/marshmallow,bartaelterman/marshmallow,VladimirPal/marshmallow,daniloakamine/marshmallow,mwstobo/marshmallow,dwieeb/marshmallow,etataurov/marshmallow,maximkulkin/marshmallow,marshmallow-code/marshmallow,0xDCA/marshmallow,quxiaolong1504/marshmallow,Tim-Erwin/marshmallow
|
# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
Add test for storing dictionaries on ValidationError
|
# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
def test_stores_dictionaries(self):
messages = {'user': {'email': ['email is invalid']}}
err = ValidationError(messages)
assert err.messages == messages
|
<commit_before># -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
<commit_msg>Add test for storing dictionaries on ValidationError<commit_after>
|
# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
    """Unit tests for the normalization of ValidationError.messages."""

    def test_stores_message_in_list(self):
        # A single string message is wrapped into a one-element list.
        error = ValidationError('foo')
        assert error.messages == ['foo']

    def test_can_pass_list_of_messages(self):
        # A list of messages is stored unchanged.
        error = ValidationError(['foo', 'bar'])
        assert error.messages == ['foo', 'bar']

    def test_stores_dictionaries(self):
        # A dict of messages (e.g. nested field errors) is stored as-is.
        expected = {'user': {'email': ['email is invalid']}}
        error = ValidationError(expected)
        assert error.messages == expected
# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
Add test for storing dictionaries on ValidationError# -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
def test_stores_dictionaries(self):
messages = {'user': {'email': ['email is invalid']}}
err = ValidationError(messages)
assert err.messages == messages
|
<commit_before># -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
<commit_msg>Add test for storing dictionaries on ValidationError<commit_after># -*- coding: utf-8 -*-
from marshmallow.exceptions import ValidationError
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
def test_stores_dictionaries(self):
messages = {'user': {'email': ['email is invalid']}}
err = ValidationError(messages)
assert err.messages == messages
|
1181829a0b4e4d395693ad6856876b4be8b6bb90
|
ratechecker/migrations/0002_remove_fee_loader.py
|
ratechecker/migrations/0002_remove_fee_loader.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
Comment out fix_fee_product_index from migration
|
Comment out fix_fee_product_index from migration
|
Python
|
cc0-1.0
|
cfpb/owning-a-home-api
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
Comment out fix_fee_product_index from migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
<commit_msg>Comment out fix_fee_product_index from migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
Comment out fix_fee_product_index from migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
<commit_msg>Comment out fix_fee_product_index from migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.