commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4ad06836e009309c7b5c00f0f932f9db38dff15c | examples/weather/gensvcbind.py | examples/weather/gensvcbind.py | import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
import pyxb.standard.bindings.soaphttp as soaphttp
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
spec = wsdl.definitions.CreateFromDOM(doc.documentElement)
print pyxb.Namespace.AvailableForLoad()
for t in spec.types():
for t2 in t.wildcardElements():
if isinstance(t2, Node):
print t2.namespaceURI
attribute_map.update(domutils.AttributeMap(t2))
ns = pyxb.Namespace.NamespaceForURI(t2.namespaceURI)
type_spec = xs.schema.CreateFromDOM(t2, attribute_map)
print type_spec
open('weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=type_spec))
sys.exit(0)
| import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
try:
spec = wsdl.definitions.CreateFromDOM(doc.documentElement, process_schema=True)
except Exception, e:
print 'ERROR building schema: %s' % (e,)
sys.exit(1)
open('raw_weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=spec.schema()))
| Update how schema is built | Update how schema is built
| Python | apache-2.0 | balanced/PyXB,CantemoInternal/pyxb,pabigot/pyxb,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,CantemoInternal/pyxb,jonfoster/pyxb1,jonfoster/pyxb2,jonfoster/pyxb2,balanced/PyXB,CantemoInternal/pyxb,balanced/PyXB,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb-upstream-mirror,pabigot/pyxb,jonfoster/pyxb2 | import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
import pyxb.standard.bindings.soaphttp as soaphttp
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
spec = wsdl.definitions.CreateFromDOM(doc.documentElement)
print pyxb.Namespace.AvailableForLoad()
for t in spec.types():
for t2 in t.wildcardElements():
if isinstance(t2, Node):
print t2.namespaceURI
attribute_map.update(domutils.AttributeMap(t2))
ns = pyxb.Namespace.NamespaceForURI(t2.namespaceURI)
type_spec = xs.schema.CreateFromDOM(t2, attribute_map)
print type_spec
open('weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=type_spec))
sys.exit(0)
Update how schema is built | import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
try:
spec = wsdl.definitions.CreateFromDOM(doc.documentElement, process_schema=True)
except Exception, e:
print 'ERROR building schema: %s' % (e,)
sys.exit(1)
open('raw_weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=spec.schema()))
| <commit_before>import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
import pyxb.standard.bindings.soaphttp as soaphttp
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
spec = wsdl.definitions.CreateFromDOM(doc.documentElement)
print pyxb.Namespace.AvailableForLoad()
for t in spec.types():
for t2 in t.wildcardElements():
if isinstance(t2, Node):
print t2.namespaceURI
attribute_map.update(domutils.AttributeMap(t2))
ns = pyxb.Namespace.NamespaceForURI(t2.namespaceURI)
type_spec = xs.schema.CreateFromDOM(t2, attribute_map)
print type_spec
open('weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=type_spec))
sys.exit(0)
<commit_msg>Update how schema is built<commit_after> | import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
try:
spec = wsdl.definitions.CreateFromDOM(doc.documentElement, process_schema=True)
except Exception, e:
print 'ERROR building schema: %s' % (e,)
sys.exit(1)
open('raw_weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=spec.schema()))
| import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
import pyxb.standard.bindings.soaphttp as soaphttp
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
spec = wsdl.definitions.CreateFromDOM(doc.documentElement)
print pyxb.Namespace.AvailableForLoad()
for t in spec.types():
for t2 in t.wildcardElements():
if isinstance(t2, Node):
print t2.namespaceURI
attribute_map.update(domutils.AttributeMap(t2))
ns = pyxb.Namespace.NamespaceForURI(t2.namespaceURI)
type_spec = xs.schema.CreateFromDOM(t2, attribute_map)
print type_spec
open('weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=type_spec))
sys.exit(0)
Update how schema is builtimport pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
try:
spec = wsdl.definitions.CreateFromDOM(doc.documentElement, process_schema=True)
except Exception, e:
print 'ERROR building schema: %s' % (e,)
sys.exit(1)
open('raw_weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=spec.schema()))
| <commit_before>import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
import pyxb.standard.bindings.soaphttp as soaphttp
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
spec = wsdl.definitions.CreateFromDOM(doc.documentElement)
print pyxb.Namespace.AvailableForLoad()
for t in spec.types():
for t2 in t.wildcardElements():
if isinstance(t2, Node):
print t2.namespaceURI
attribute_map.update(domutils.AttributeMap(t2))
ns = pyxb.Namespace.NamespaceForURI(t2.namespaceURI)
type_spec = xs.schema.CreateFromDOM(t2, attribute_map)
print type_spec
open('weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=type_spec))
sys.exit(0)
<commit_msg>Update how schema is built<commit_after>import pyxb.Namespace
import pyxb.xmlschema as xs
import sys
import pyxb.standard.bindings.wsdl as wsdl
from xml.dom import Node
from xml.dom import minidom
import pyxb.binding.generate
import pyxb.utils.domutils as domutils
doc = minidom.parse('weather.wsdl')
root = doc.documentElement
attribute_map = domutils.AttributeMap(root)
try:
spec = wsdl.definitions.CreateFromDOM(doc.documentElement, process_schema=True)
except Exception, e:
print 'ERROR building schema: %s' % (e,)
sys.exit(1)
open('raw_weather.py', 'w').write(pyxb.binding.generate.GeneratePython(schema=spec.schema()))
|
52a4a10d54374b08c5835d02077fd1edcdc547ac | tests/test_union_energy_grids/results.py | tests/test_union_energy_grids/results.py | #!/usr/bin/env python
import sys
# import statepoint
sys.path.insert(0, '../../src/utils')
import statepoint
# read in statepoint file
if len(sys.argv) > 1:
sp = statepoint.StatePoint(sys.argv[1])
else:
sp = statepoint.StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
| #!/usr/bin/env python
import sys
sys.path.insert(0, '../../src/utils')
from openmc.statepoint import StatePoint
# read in statepoint file
if len(sys.argv) > 1:
print(sys.argv)
sp = StatePoint(sys.argv[1])
else:
sp = StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
| Fix import for statepoint for test_union_energy_grids | Fix import for statepoint for test_union_energy_grids
| Python | mit | smharper/openmc,paulromano/openmc,mit-crpg/openmc,bhermanmit/openmc,wbinventor/openmc,paulromano/openmc,samuelshaner/openmc,mit-crpg/openmc,smharper/openmc,samuelshaner/openmc,johnnyliu27/openmc,mit-crpg/openmc,lilulu/openmc,walshjon/openmc,shikhar413/openmc,liangjg/openmc,shikhar413/openmc,walshjon/openmc,paulromano/openmc,amandalund/openmc,lilulu/openmc,samuelshaner/openmc,kellyrowland/openmc,kellyrowland/openmc,wbinventor/openmc,smharper/openmc,shikhar413/openmc,johnnyliu27/openmc,smharper/openmc,wbinventor/openmc,wbinventor/openmc,walshjon/openmc,lilulu/openmc,amandalund/openmc,johnnyliu27/openmc,mjlong/openmc,liangjg/openmc,mit-crpg/openmc,paulromano/openmc,shikhar413/openmc,bhermanmit/openmc,samuelshaner/openmc,johnnyliu27/openmc,liangjg/openmc,walshjon/openmc,amandalund/openmc,amandalund/openmc,liangjg/openmc,mjlong/openmc | #!/usr/bin/env python
import sys
# import statepoint
sys.path.insert(0, '../../src/utils')
import statepoint
# read in statepoint file
if len(sys.argv) > 1:
sp = statepoint.StatePoint(sys.argv[1])
else:
sp = statepoint.StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
Fix import for statepoint for test_union_energy_grids | #!/usr/bin/env python
import sys
sys.path.insert(0, '../../src/utils')
from openmc.statepoint import StatePoint
# read in statepoint file
if len(sys.argv) > 1:
print(sys.argv)
sp = StatePoint(sys.argv[1])
else:
sp = StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
| <commit_before>#!/usr/bin/env python
import sys
# import statepoint
sys.path.insert(0, '../../src/utils')
import statepoint
# read in statepoint file
if len(sys.argv) > 1:
sp = statepoint.StatePoint(sys.argv[1])
else:
sp = statepoint.StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
<commit_msg>Fix import for statepoint for test_union_energy_grids<commit_after> | #!/usr/bin/env python
import sys
sys.path.insert(0, '../../src/utils')
from openmc.statepoint import StatePoint
# read in statepoint file
if len(sys.argv) > 1:
print(sys.argv)
sp = StatePoint(sys.argv[1])
else:
sp = StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
| #!/usr/bin/env python
import sys
# import statepoint
sys.path.insert(0, '../../src/utils')
import statepoint
# read in statepoint file
if len(sys.argv) > 1:
sp = statepoint.StatePoint(sys.argv[1])
else:
sp = statepoint.StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
Fix import for statepoint for test_union_energy_grids#!/usr/bin/env python
import sys
sys.path.insert(0, '../../src/utils')
from openmc.statepoint import StatePoint
# read in statepoint file
if len(sys.argv) > 1:
print(sys.argv)
sp = StatePoint(sys.argv[1])
else:
sp = StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
| <commit_before>#!/usr/bin/env python
import sys
# import statepoint
sys.path.insert(0, '../../src/utils')
import statepoint
# read in statepoint file
if len(sys.argv) > 1:
sp = statepoint.StatePoint(sys.argv[1])
else:
sp = statepoint.StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
<commit_msg>Fix import for statepoint for test_union_energy_grids<commit_after>#!/usr/bin/env python
import sys
sys.path.insert(0, '../../src/utils')
from openmc.statepoint import StatePoint
# read in statepoint file
if len(sys.argv) > 1:
print(sys.argv)
sp = StatePoint(sys.argv[1])
else:
sp = StatePoint('statepoint.10.binary')
sp.read_results()
# set up output string
outstr = ''
# write out k-combined
outstr += 'k-combined:\n'
outstr += "{0:12.6E} {1:12.6E}\n".format(sp.k_combined[0], sp.k_combined[1])
# write results to file
with open('results_test.dat','w') as fh:
fh.write(outstr)
|
b9441dd6e41aa8a73ca61df0963e086bb97ab4ef | src/eyeflask/__init__.py | src/eyeflask/__init__.py | from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
from .server import server
app.register_blueprint(server)
return app
| from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
app.debug_log_format = '\n'.join([
80 * '-',
'%(asctime)s %(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:',
'%(message)s',
80 * '-'
])
from .server import server
app.register_blueprint(server)
return app
| Add time to debug log format | Add time to debug log format
| Python | mit | n8henrie/eyeflask | from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
from .server import server
app.register_blueprint(server)
return app
Add time to debug log format | from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
app.debug_log_format = '\n'.join([
80 * '-',
'%(asctime)s %(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:',
'%(message)s',
80 * '-'
])
from .server import server
app.register_blueprint(server)
return app
| <commit_before>from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
from .server import server
app.register_blueprint(server)
return app
<commit_msg>Add time to debug log format<commit_after> | from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
app.debug_log_format = '\n'.join([
80 * '-',
'%(asctime)s %(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:',
'%(message)s',
80 * '-'
])
from .server import server
app.register_blueprint(server)
return app
| from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
from .server import server
app.register_blueprint(server)
return app
Add time to debug log formatfrom flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
app.debug_log_format = '\n'.join([
80 * '-',
'%(asctime)s %(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:',
'%(message)s',
80 * '-'
])
from .server import server
app.register_blueprint(server)
return app
| <commit_before>from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
from .server import server
app.register_blueprint(server)
return app
<commit_msg>Add time to debug log format<commit_after>from flask import Flask
import os
import os.path
__version__ = "0.1.0"
def create_app(config=None):
app = Flask(__name__, instance_relative_config=True)
config_name = "eyeflask.cfg"
# Use user specified config file if provided.
if config:
config_path = os.path.abspath(config)
# Load from `instance` folder
# http://flask.pocoo.org/docs/0.10/config/#instance-folders
else:
config_path = config_name
try:
app.config.from_pyfile(config_path)
except FileNotFoundError:
sample_conf = os.path.join(os.path.dirname(__file__), "extras",
"eyeflask-sample.cfg")
no_conf_msg = ("Unable to load your config file.\n"
"Either specify one with the `-c` flag, or put one "
"named `{}` in the Flask\n"
"instance folder at the path below. You may have to "
"create the instance\n"
"folder if it doesn't already "
"exist.\n".format(config_name))
print(no_conf_msg)
print("Sample config: {}".format(sample_conf))
print("Instance folder: {}\n".format(app.instance_path))
raise
app.debug_log_format = '\n'.join([
80 * '-',
'%(asctime)s %(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:',
'%(message)s',
80 * '-'
])
from .server import server
app.register_blueprint(server)
return app
|
193ef7e8e4f6629e8534083e705826656c908cab | ws-tests/test_amendment_list.py | ws-tests/test_amendment_list.py | #!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + 'v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
| #!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + '/v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
| Use typical domain value (no trailing slash) | Use typical domain value (no trailing slash)
| Python | bsd-2-clause | OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api | #!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + 'v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
Use typical domain value (no trailing slash) | #!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + '/v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
| <commit_before>#!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + 'v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
<commit_msg>Use typical domain value (no trailing slash)<commit_after> | #!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + '/v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
| #!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + 'v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
Use typical domain value (no trailing slash)#!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + '/v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
| <commit_before>#!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + 'v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
<commit_msg>Use typical domain value (no trailing slash)<commit_after>#!/usr/bin/env python
import sys, os
from opentreetesting import test_http_json_method, config
DOMAIN = config('host', 'apihost')
SUBMIT_URI = DOMAIN + '/v3/amendments/list_all'
r = test_http_json_method(SUBMIT_URI,
'GET',
expected_status=200,
return_bool_data=True)
if not r[0]:
sys.exit(1)
#print r[1]
|
ccaf9cce478c077040a45340b498063ea8b7fc50 | elfi/__init__.py | elfi/__init__.py | # -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3_dev'
| # -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3'
| Revert "Change version for dev" | Revert "Change version for dev"
This reverts commit 56a9db4cfeda91235313c5e72cb06420a6110673.
| Python | bsd-3-clause | HIIT/elfi,elfi-dev/elfi,lintusj1/elfi,elfi-dev/elfi,lintusj1/elfi | # -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3_dev'
Revert "Change version for dev"
This reverts commit 56a9db4cfeda91235313c5e72cb06420a6110673. | # -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3'
| <commit_before># -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3_dev'
<commit_msg>Revert "Change version for dev"
This reverts commit 56a9db4cfeda91235313c5e72cb06420a6110673.<commit_after> | # -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3'
| # -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3_dev'
Revert "Change version for dev"
This reverts commit 56a9db4cfeda91235313c5e72cb06420a6110673.# -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3'
| <commit_before># -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3_dev'
<commit_msg>Revert "Change version for dev"
This reverts commit 56a9db4cfeda91235313c5e72cb06420a6110673.<commit_after># -*- coding: utf-8 -*-
from elfi.core import Transform, Simulator, Summary, Discrepancy
from elfi.distributions import *
from elfi.result import *
from elfi.methods import *
from elfi.storage import *
from elfi.visualization import *
from elfi.inference_task import InferenceTask
from elfi.wrapper import *
from elfi.env import client, inference_task, new_inference_task
from elfi import tools
__author__ = 'ELFI authors'
__email__ = 'elfi-support@hiit.fi'
__version__ = '0.3'
|
b544361b2e3f7942a82a911a8d6d314a2044be97 | almostfunded/wsgi.py | almostfunded/wsgi.py | """
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application) | """
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application) | Add missing import statement for whitenoise | Add missing import statement for whitenoise
| Python | mit | lorenanicole/almost_funded,lorenanicole/almost_funded,lorenanicole/almost_funded | """
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)Add missing import statement for whitenoise | """
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application) | <commit_before>"""
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)<commit_msg>Add missing import statement for whitenoise<commit_after> | """
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application) | """
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)Add missing import statement for whitenoise"""
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application) | <commit_before>"""
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)<commit_msg>Add missing import statement for whitenoise<commit_after>"""
WSGI config for untitled1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "almostfunded.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application) |
b18aa2f4400deab98cc0e27c798ee6d7893232cd | utils/text.py | utils/text.py | def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
lang = translator.detect(word)
if lang not in langs:
lang = 'en'
return lang
| def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate, YandexTranslateException
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
try:
lang = translator.detect(word)
except YandexTranslateException:
lang = 'en'
if lang not in langs:
lang = 'en'
return lang
| Add unknown language error catching | Add unknown language error catching
| Python | mit | Elishanto/HarryBotter | def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
lang = translator.detect(word)
if lang not in langs:
lang = 'en'
return lang
Add unknown language error catching | def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate, YandexTranslateException
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
try:
lang = translator.detect(word)
except YandexTranslateException:
lang = 'en'
if lang not in langs:
lang = 'en'
return lang
| <commit_before>def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
lang = translator.detect(word)
if lang not in langs:
lang = 'en'
return lang
<commit_msg>Add unknown language error catching<commit_after> | def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate, YandexTranslateException
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
try:
lang = translator.detect(word)
except YandexTranslateException:
lang = 'en'
if lang not in langs:
lang = 'en'
return lang
| def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
lang = translator.detect(word)
if lang not in langs:
lang = 'en'
return lang
Add unknown language error catchingdef restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate, YandexTranslateException
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
try:
lang = translator.detect(word)
except YandexTranslateException:
lang = 'en'
if lang not in langs:
lang = 'en'
return lang
| <commit_before>def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
lang = translator.detect(word)
if lang not in langs:
lang = 'en'
return lang
<commit_msg>Add unknown language error catching<commit_after>def restrict_len(content):
if len(content) > 320:
content = content[:310].strip() + '...'
return content
def detect_language(config, langs, word):
from yandex_translate import YandexTranslate, YandexTranslateException
translator = YandexTranslate(config['yandex_translate_key'])
russian = 'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
if word[0] in russian:
lang = 'ru'
else:
try:
lang = translator.detect(word)
except YandexTranslateException:
lang = 'en'
if lang not in langs:
lang = 'en'
return lang
|
41fe44e99361d9006a8b196e9b886ffdb3e8e460 | functional_tests/test_evexml.py | functional_tests/test_evexml.py | """Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_submit')
self.browser.get(self.live_server_url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
| """Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_add')
self.browser.get(url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
| Make test get correct url | Make test get correct url
| Python | mit | randomic/aniauth-tdd,randomic/aniauth-tdd | """Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_submit')
self.browser.get(self.live_server_url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
Make test get correct url | """Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_add')
self.browser.get(url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
| <commit_before>"""Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_submit')
self.browser.get(self.live_server_url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
<commit_msg>Make test get correct url<commit_after> | """Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_add')
self.browser.get(url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
| """Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_submit')
self.browser.get(self.live_server_url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
Make test get correct url"""Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_add')
self.browser.get(url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
| <commit_before>"""Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_submit')
self.browser.get(self.live_server_url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
<commit_msg>Make test get correct url<commit_after>"""Functional tests for the xml api part of aniauth project.
This is a temporary app as EVE Online's xml api is deprecated and will be
disabled March 2018.
"""
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.shortcuts import reverse
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
MAX_WAIT = 10
@tag('functional')
class SubmissionTest(StaticLiveServerTestCase):
"""Tests for users who are submitting xml api key.
"""
@classmethod
def setUpClass(cls):
super(SubmissionTest, cls).setUpClass()
cls.browser = webdriver.Chrome()
cls.browser.maximize_window()
cls.browser.implicitly_wait(MAX_WAIT)
super(SubmissionTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.browser.refresh()
cls.browser.quit()
super(SubmissionTest, cls).tearDownClass()
def tearDown(self):
self.browser.refresh()
def test_user_can_see_apikey_form(self):
"""A user should be able to see the form for submitting api keys.
"""
# They browse to the eve api keys page.
url = self.live_server_url + reverse('eveapi_add')
self.browser.get(url)
# They see input boxes for keyID and vCode.
keyid_input = self.browser.find_element_by_name('keyID')
vcode_input = self.browser.find_element_by_name('vCode')
|
f468ea8123768a3f66621bfecae20814fa83017b | website_sale_clear_line/controllers/main.py | website_sale_clear_line/controllers/main.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
| FIX website sale clear line | FIX website sale clear line
| Python | agpl-3.0 | ingadhoc/website | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
FIX website sale clear line | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
<commit_msg>FIX website sale clear line<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
FIX website sale clear line# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, uid, context, pool = (
request.cr, request.uid, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, uid, line_id, context=context)
<commit_msg>FIX website sale clear line<commit_after># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp.http import request
from openerp import http, SUPERUSER_ID
class pos_website_sale(http.Controller):
@http.route(
['/shop/clear_cart_line'], type='json', auth="public", website=True)
def clear_cart_line(self, line_id, **kw):
cr, context, pool = (
request.cr, request.context, request.registry)
pool['sale.order.line'].unlink(
cr, SUPERUSER_ID, line_id, context=context)
|
8efab7ddd356a9b2e2209b668d3ed83a5ac9faf2 | tests/test_logic.py | tests/test_logic.py | from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
self.assertRaises(TypeError, logic.create_room('wrongname', 'orange'))
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
| from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def setUp(self):
self.white_char_in_name = logic.create_room('office', "name ")
self.white_char_in_typr = logic.create_room('livingspace ', "name")
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
with self.assertRaises(TypeError):
logic.create_room('wrongname', 'gooodname')
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
def test_white_char_in_name(self):
self.assertEqual(self.white_char_in_name.name, "name")
def test_white_char_in_type(self):
self.assertIsInstance(self.white_char_in_typr, model.LivingSpace)
| Add test case to test non-standard input | Add test case to test non-standard input
| Python | mit | georgreen/Geoogreen-Mamboleo-Dojo-Project | from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
self.assertRaises(TypeError, logic.create_room('wrongname', 'orange'))
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
Add test case to test non-standard input | from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def setUp(self):
self.white_char_in_name = logic.create_room('office', "name ")
self.white_char_in_typr = logic.create_room('livingspace ', "name")
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
with self.assertRaises(TypeError):
logic.create_room('wrongname', 'gooodname')
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
def test_white_char_in_name(self):
self.assertEqual(self.white_char_in_name.name, "name")
def test_white_char_in_type(self):
self.assertIsInstance(self.white_char_in_typr, model.LivingSpace)
| <commit_before>from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
self.assertRaises(TypeError, logic.create_room('wrongname', 'orange'))
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
<commit_msg>Add test case to test non-standard input<commit_after> | from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def setUp(self):
self.white_char_in_name = logic.create_room('office', "name ")
self.white_char_in_typr = logic.create_room('livingspace ', "name")
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
with self.assertRaises(TypeError):
logic.create_room('wrongname', 'gooodname')
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
def test_white_char_in_name(self):
self.assertEqual(self.white_char_in_name.name, "name")
def test_white_char_in_type(self):
self.assertIsInstance(self.white_char_in_typr, model.LivingSpace)
| from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
self.assertRaises(TypeError, logic.create_room('wrongname', 'orange'))
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
Add test case to test non-standard inputfrom context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def setUp(self):
self.white_char_in_name = logic.create_room('office', "name ")
self.white_char_in_typr = logic.create_room('livingspace ', "name")
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
with self.assertRaises(TypeError):
logic.create_room('wrongname', 'gooodname')
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
def test_white_char_in_name(self):
self.assertEqual(self.white_char_in_name.name, "name")
def test_white_char_in_type(self):
self.assertIsInstance(self.white_char_in_typr, model.LivingSpace)
| <commit_before>from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
self.assertRaises(TypeError, logic.create_room('wrongname', 'orange'))
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
<commit_msg>Add test case to test non-standard input<commit_after>from context import core
from context import models
from models import model
from core import logic
import unittest
class test_logic(unittest.TestCase):
def setUp(self):
self.white_char_in_name = logic.create_room('office', "name ")
self.white_char_in_typr = logic.create_room('livingspace ', "name")
def test_create_room_office(self):
new_office = logic.create_room('office', 'orange')
self.assertIsInstance(new_office, model.Office)
def test_create_room_livingspace(self):
new_livingspace = logic.create_room('livingspace', 'manjaro')
self.assertIsInstance(new_livingspace, model.LivingSpace)
def test_create_room_Wrongtype(self):
with self.assertRaises(TypeError):
logic.create_room('wrongname', 'gooodname')
def test_create_room_Noname(self):
self.assertEqual(logic.create_room('office', ' '), 'Invalid name')
def test_white_char_in_name(self):
self.assertEqual(self.white_char_in_name.name, "name")
def test_white_char_in_type(self):
self.assertIsInstance(self.white_char_in_typr, model.LivingSpace)
|
b457ab3abc0dbcfd94b26f7107144af67b8a85f4 | utils/checks.py | utils/checks.py | from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
| from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = await ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
| Make is_owner_or not return True always | Make is_owner_or not return True always
| Python | mit | BeatButton/beattie-bot,BeatButton/beattie | from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
Make is_owner_or not return True always | from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = await ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
| <commit_before>from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
<commit_msg>Make is_owner_or not return True always<commit_after> | from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = await ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
| from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
Make is_owner_or not return True alwaysfrom discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = await ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
| <commit_before>from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
<commit_msg>Make is_owner_or not return True always<commit_after>from discord.ext import commands
def is_owner_or(**perms):
async def predicate(ctx):
owner = await ctx.bot.is_owner(ctx.author)
permissions = ctx.channel.permissions_for(ctx.author)
return all(getattr(permissions, perm, None) == value
for perm, value in perms.items()) or owner
return commands.check(predicate)
|
87881f594836b7cf92ffce69dbb643ee05df88d1 | utils/config_utils.py | utils/config_utils.py | import sys
import yaml
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
| import sys
import yaml
def main_config():
config_file = './config/.main.yml'
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing main config file: '{0}'".format(config_file))
return config, config_file
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
| Add util for loading main config | Add util for loading main config
| Python | mit | projectweekend/Transit-Stop-Collector | import sys
import yaml
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
Add util for loading main config | import sys
import yaml
def main_config():
config_file = './config/.main.yml'
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing main config file: '{0}'".format(config_file))
return config, config_file
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
| <commit_before>import sys
import yaml
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
<commit_msg>Add util for loading main config<commit_after> | import sys
import yaml
def main_config():
config_file = './config/.main.yml'
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing main config file: '{0}'".format(config_file))
return config, config_file
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
| import sys
import yaml
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
Add util for loading main configimport sys
import yaml
def main_config():
config_file = './config/.main.yml'
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing main config file: '{0}'".format(config_file))
return config, config_file
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
| <commit_before>import sys
import yaml
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
<commit_msg>Add util for loading main config<commit_after>import sys
import yaml
def main_config():
config_file = './config/.main.yml'
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing main config file: '{0}'".format(config_file))
return config, config_file
def job_config(args):
try:
config_file = './config/{0}.yml'.format(args[0])
except IndexError:
sys.exit("Job name is a required argument. Example: chicago_cta")
try:
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
except IOError:
sys.exit("Missing config file for job: '{0}'".format(config_file))
return config, config_file
|
f699281496950a10ec59651ed93f61e8ef79c1e7 | admin/__init__.py | admin/__init__.py | from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous:
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
| from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous():
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
| Revert "Fix 502 on admin page, is_authenticated() is now a boolean" | Revert "Fix 502 on admin page, is_authenticated() is now a boolean"
This reverts commit 0d9a7f86c1d988b4e0f4617c5e4f0409c0754e19.
| Python | mit | zifnab06/zifb.in,zifnab06/zifb.in | from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous:
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
Revert "Fix 502 on admin page, is_authenticated() is now a boolean"
This reverts commit 0d9a7f86c1d988b4e0f4617c5e4f0409c0754e19. | from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous():
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
| <commit_before>from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous:
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
<commit_msg>Revert "Fix 502 on admin page, is_authenticated() is now a boolean"
This reverts commit 0d9a7f86c1d988b4e0f4617c5e4f0409c0754e19.<commit_after> | from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous():
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
| from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous:
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
Revert "Fix 502 on admin page, is_authenticated() is now a boolean"
This reverts commit 0d9a7f86c1d988b4e0f4617c5e4f0409c0754e19.from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous():
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
| <commit_before>from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous:
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
<commit_msg>Revert "Fix 502 on admin page, is_authenticated() is now a boolean"
This reverts commit 0d9a7f86c1d988b4e0f4617c5e4f0409c0754e19.<commit_after>from database import User, Paste, ApiKey
from flask import abort, current_app
from flask_login import current_user
from flask_admin import BaseView, Admin
from flask_admin.contrib.mongoengine import ModelView
def is_admin():
if current_user.is_anonymous():
abort(403)
else:
return current_user.admin
class ProtectedView(BaseView):
def is_accessible(self):
return is_admin()
class SecureModelView(ModelView):
def is_accessible(self):
return is_admin()
class UserModel(SecureModelView):
column_exclude_list = "hash"
column_searchable_list = ("username", "email")
can_create = False
can_edit = True
can_delete = True
class PasteModel(SecureModelView):
list_display = ("name", "paste", "time", "expire", "user", "views", "language")
column_searchable_list = ("name", "paste")
class ApiKeyModel(SecureModelView):
pass
admin = Admin(current_app)
admin.add_view(UserModel(User))
admin.add_view(PasteModel(Paste))
admin.add_view(ApiKeyModel(ApiKey))
|
a8da08a6cf3bff12ce1668c5f4f99710317e42f0 | tests/unit/test_place.py | tests/unit/test_place.py | """Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
| """Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
def test_no_standard_library_placement():
assert place.module_with_reason(
"pathlib", config=Config(sections=["THIRDPARTY"], default_section="THIRDPARTY")
) == ("THIRDPARTY", "Default option in Config or universal default.")
assert place.module("pathlib") == "STDLIB"
| Add unit test case for desired placement behavior | Add unit test case for desired placement behavior
| Python | mit | PyCQA/isort,PyCQA/isort | """Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
Add unit test case for desired placement behavior | """Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
def test_no_standard_library_placement():
assert place.module_with_reason(
"pathlib", config=Config(sections=["THIRDPARTY"], default_section="THIRDPARTY")
) == ("THIRDPARTY", "Default option in Config or universal default.")
assert place.module("pathlib") == "STDLIB"
| <commit_before>"""Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
<commit_msg>Add unit test case for desired placement behavior<commit_after> | """Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
def test_no_standard_library_placement():
assert place.module_with_reason(
"pathlib", config=Config(sections=["THIRDPARTY"], default_section="THIRDPARTY")
) == ("THIRDPARTY", "Default option in Config or universal default.")
assert place.module("pathlib") == "STDLIB"
| """Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
Add unit test case for desired placement behavior"""Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
def test_no_standard_library_placement():
assert place.module_with_reason(
"pathlib", config=Config(sections=["THIRDPARTY"], default_section="THIRDPARTY")
) == ("THIRDPARTY", "Default option in Config or universal default.")
assert place.module("pathlib") == "STDLIB"
| <commit_before>"""Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
<commit_msg>Add unit test case for desired placement behavior<commit_after>"""Tests for the isort import placement module"""
from functools import partial
from isort import place, sections
from isort.settings import Config
def test_module(src_path):
place_tester = partial(place.module, config=Config(src_paths=[src_path]))
assert place_tester("isort") == sections.FIRSTPARTY
assert place_tester("os") == sections.STDLIB
assert place_tester(".deprecated") == sections.LOCALFOLDER
assert place_tester("__future__") == sections.FUTURE
assert place_tester("hug") == sections.THIRDPARTY
def test_extra_standard_library(src_path):
place_tester = partial(
place.module, config=Config(src_paths=[src_path], extra_standard_library=["hug"])
)
assert place_tester("os") == sections.STDLIB
assert place_tester("hug") == sections.STDLIB
def test_no_standard_library_placement():
assert place.module_with_reason(
"pathlib", config=Config(sections=["THIRDPARTY"], default_section="THIRDPARTY")
) == ("THIRDPARTY", "Default option in Config or universal default.")
assert place.module("pathlib") == "STDLIB"
|
ad278fdc71140dfb4be27895e747356e668e3b6c | teuthology/lockstatus.py | teuthology/lockstatus.py | import requests
import os
from .config import config
def get_status(name):
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
| import requests
import os
from .config import config
from .misc import canonicalize_hostname
def get_status(name):
name = canonicalize_hostname(name, user=None)
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
| Remove the 'user@' prefix before checking status | Remove the 'user@' prefix before checking status
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
| Python | mit | ceph/teuthology,t-miyamae/teuthology,zhouyuan/teuthology,caibo2014/teuthology,tchaikov/teuthology,dmick/teuthology,robbat2/teuthology,dreamhost/teuthology,ivotron/teuthology,ivotron/teuthology,SUSE/teuthology,robbat2/teuthology,michaelsevilla/teuthology,tchaikov/teuthology,michaelsevilla/teuthology,ceph/teuthology,dreamhost/teuthology,dmick/teuthology,yghannam/teuthology,t-miyamae/teuthology,zhouyuan/teuthology,dmick/teuthology,ktdreyer/teuthology,ktdreyer/teuthology,SUSE/teuthology,caibo2014/teuthology,SUSE/teuthology,yghannam/teuthology | import requests
import os
from .config import config
def get_status(name):
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
Remove the 'user@' prefix before checking status
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com> | import requests
import os
from .config import config
from .misc import canonicalize_hostname
def get_status(name):
name = canonicalize_hostname(name, user=None)
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
| <commit_before>import requests
import os
from .config import config
def get_status(name):
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
<commit_msg>Remove the 'user@' prefix before checking status
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after> | import requests
import os
from .config import config
from .misc import canonicalize_hostname
def get_status(name):
name = canonicalize_hostname(name, user=None)
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
| import requests
import os
from .config import config
def get_status(name):
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
Remove the 'user@' prefix before checking status
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>import requests
import os
from .config import config
from .misc import canonicalize_hostname
def get_status(name):
name = canonicalize_hostname(name, user=None)
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
| <commit_before>import requests
import os
from .config import config
def get_status(name):
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
<commit_msg>Remove the 'user@' prefix before checking status
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>import requests
import os
from .config import config
from .misc import canonicalize_hostname
def get_status(name):
name = canonicalize_hostname(name, user=None)
uri = os.path.join(config.lock_server, 'nodes', name, '')
response = requests.get(uri)
success = response.ok
if success:
return response.json()
return None
|
e1acfc8a05f1a131dc4b146837e007efa58a2ebf | theano/learning_rates.py | theano/learning_rates.py | """ Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return self.__rate
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
| """ Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return TT.as_tensor_variable(self._rate, name="lr")
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
| Make fixed learning rates not crash. | Make fixed learning rates not crash.
There was a slight bug in this code before, which I fixed.
| Python | mit | djpetti/rpinets,djpetti/rpinets | """ Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return self.__rate
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
Make fixed learning rates not crash.
There was a slight bug in this code before, which I fixed. | """ Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return TT.as_tensor_variable(self._rate, name="lr")
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
| <commit_before>""" Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return self.__rate
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
<commit_msg>Make fixed learning rates not crash.
There was a slight bug in this code before, which I fixed.<commit_after> | """ Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return TT.as_tensor_variable(self._rate, name="lr")
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
| """ Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return self.__rate
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
Make fixed learning rates not crash.
There was a slight bug in this code before, which I fixed.""" Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return TT.as_tensor_variable(self._rate, name="lr")
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
| <commit_before>""" Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return self.__rate
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
<commit_msg>Make fixed learning rates not crash.
There was a slight bug in this code before, which I fixed.<commit_after>""" Classes that simplify learning rate modification. """
import theano
import theano.tensor as TT
class _LearningRate(object):
""" Suplerclass for learning rates. """
def __init__(self, initial_rate):
"""
Args:
initial_rate: Initial value of the learning rate. """
self._rate = initial_rate
def get(self, cycle):
"""
Args:
cycle: The symbolic global step.
Returns:
The learning rate to use for this cycle. """
return TT.as_tensor_variable(self._rate, name="lr")
class Fixed(_LearningRate):
""" The simplest type of learning rate. It is just a fixed value. """
pass
class ExponentialDecay(_LearningRate):
""" A learning rate that decays exponentially with time. """
def __init__(self, decay_rate, decay_steps, *args, **kwargs):
"""
Args:
decay_rate: Number of steps needed to decay by decay_rate.
decay_steps: The decay rate. """
super(ExponentialDecay, self).__init__(*args, **kwargs)
self.__decay_steps = decay_steps
self.__decay_rate = decay_rate
def get(self, cycle):
rate = self._rate * self.__decay_rate ** (cycle / float(self.__decay_steps))
return TT.cast(rate, theano.config.floatX)
|
e469ae00a815b25ab7d0e45c7f8076c56ae834b4 | test/__init__.py | test/__init__.py |
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
|
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
def assertGreaterEqual(self, first, second, msg=None):
"""
Test that first is respectively >= than second
depending on the method name. If not, the test will fail.
"""
if first >= second:
pass
else:
self.fail(msg=msg)
| Implement unittest.TestCase.assertGreaterEqual for pythons less than 2.7 | Implement unittest.TestCase.assertGreaterEqual for pythons less than 2.7
| Python | mit | pombredanne/jsonstats,pombredanne/jsonstats,RHInception/jsonstats,RHInception/jsonstats |
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
Implement unittest.TestCase.assertGreaterEqual for pythons less than 2.7 |
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
def assertGreaterEqual(self, first, second, msg=None):
"""
Test that first is respectively >= than second
depending on the method name. If not, the test will fail.
"""
if first >= second:
pass
else:
self.fail(msg=msg)
| <commit_before>
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
<commit_msg>Implement unittest.TestCase.assertGreaterEqual for pythons less than 2.7<commit_after> |
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
def assertGreaterEqual(self, first, second, msg=None):
"""
Test that first is respectively >= than second
depending on the method name. If not, the test will fail.
"""
if first >= second:
pass
else:
self.fail(msg=msg)
|
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
Implement unittest.TestCase.assertGreaterEqual for pythons less than 2.7
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
def assertGreaterEqual(self, first, second, msg=None):
"""
Test that first is respectively >= than second
depending on the method name. If not, the test will fail.
"""
if first >= second:
pass
else:
self.fail(msg=msg)
| <commit_before>
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
<commit_msg>Implement unittest.TestCase.assertGreaterEqual for pythons less than 2.7<commit_after>
import unittest
class TestCase(unittest.TestCase):
"""
Parent TestCase to use for all tests.
"""
pass
def assertGreaterEqual(self, first, second, msg=None):
"""
Test that first is respectively >= than second
depending on the method name. If not, the test will fail.
"""
if first >= second:
pass
else:
self.fail(msg=msg)
|
5fa20295eadf01e4567d89713e406ce82b738cf5 | ephemeral-cluster.py | ephemeral-cluster.py | #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
| #!/usr/bin/env python
import itertools
import subprocess
import sys
import uuid
def get_images_for_project(project):
"""
Returns a set of image names associated with a project label.
"""
p = subprocess.Popen(['docker', 'images'], stdout=subprocess.PIPE)
images = set()
while p.returncode is None:
out, err = p.communicate()
for line in itertools.ifilter(None, out.splitlines()):
bits = line.split()
if bits[0].startswith('{0}_'.format(project)):
images.add(bits[0])
if p.returncode != 0:
raise Exception('Error while retrieving images!')
return images
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'build'])
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up containers for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
sys.stderr.write('\nCleaning up images for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker', 'rmi'] + list(get_images_for_project(project)))
| Clean up built images for ephemeral cluster. | Clean up built images for ephemeral cluster.
Reviewers: tail, pi, jeff
Reviewed By: jeff
Differential Revision: http://phabricator.local.disqus.net/D19797
| Python | apache-2.0 | fuziontech/pgshovel,fuziontech/pgshovel,disqus/pgshovel,disqus/pgshovel,fuziontech/pgshovel | #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
Clean up built images for ephemeral cluster.
Reviewers: tail, pi, jeff
Reviewed By: jeff
Differential Revision: http://phabricator.local.disqus.net/D19797 | #!/usr/bin/env python
import itertools
import subprocess
import sys
import uuid
def get_images_for_project(project):
"""
Returns a set of image names associated with a project label.
"""
p = subprocess.Popen(['docker', 'images'], stdout=subprocess.PIPE)
images = set()
while p.returncode is None:
out, err = p.communicate()
for line in itertools.ifilter(None, out.splitlines()):
bits = line.split()
if bits[0].startswith('{0}_'.format(project)):
images.add(bits[0])
if p.returncode != 0:
raise Exception('Error while retrieving images!')
return images
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'build'])
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up containers for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
sys.stderr.write('\nCleaning up images for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker', 'rmi'] + list(get_images_for_project(project)))
| <commit_before>#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
<commit_msg>Clean up built images for ephemeral cluster.
Reviewers: tail, pi, jeff
Reviewed By: jeff
Differential Revision: http://phabricator.local.disqus.net/D19797<commit_after> | #!/usr/bin/env python
import itertools
import subprocess
import sys
import uuid
def get_images_for_project(project):
"""
Returns a set of image names associated with a project label.
"""
p = subprocess.Popen(['docker', 'images'], stdout=subprocess.PIPE)
images = set()
while p.returncode is None:
out, err = p.communicate()
for line in itertools.ifilter(None, out.splitlines()):
bits = line.split()
if bits[0].startswith('{0}_'.format(project)):
images.add(bits[0])
if p.returncode != 0:
raise Exception('Error while retrieving images!')
return images
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'build'])
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up containers for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
sys.stderr.write('\nCleaning up images for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker', 'rmi'] + list(get_images_for_project(project)))
| #!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
Clean up built images for ephemeral cluster.
Reviewers: tail, pi, jeff
Reviewed By: jeff
Differential Revision: http://phabricator.local.disqus.net/D19797#!/usr/bin/env python
import itertools
import subprocess
import sys
import uuid
def get_images_for_project(project):
"""
Returns a set of image names associated with a project label.
"""
p = subprocess.Popen(['docker', 'images'], stdout=subprocess.PIPE)
images = set()
while p.returncode is None:
out, err = p.communicate()
for line in itertools.ifilter(None, out.splitlines()):
bits = line.split()
if bits[0].startswith('{0}_'.format(project)):
images.add(bits[0])
if p.returncode != 0:
raise Exception('Error while retrieving images!')
return images
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'build'])
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up containers for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
sys.stderr.write('\nCleaning up images for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker', 'rmi'] + list(get_images_for_project(project)))
| <commit_before>#!/usr/bin/env python
import subprocess
import sys
import uuid
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
<commit_msg>Clean up built images for ephemeral cluster.
Reviewers: tail, pi, jeff
Reviewed By: jeff
Differential Revision: http://phabricator.local.disqus.net/D19797<commit_after>#!/usr/bin/env python
import itertools
import subprocess
import sys
import uuid
def get_images_for_project(project):
"""
Returns a set of image names associated with a project label.
"""
p = subprocess.Popen(['docker', 'images'], stdout=subprocess.PIPE)
images = set()
while p.returncode is None:
out, err = p.communicate()
for line in itertools.ifilter(None, out.splitlines()):
bits = line.split()
if bits[0].startswith('{0}_'.format(project)):
images.add(bits[0])
if p.returncode != 0:
raise Exception('Error while retrieving images!')
return images
usage = """\
Run a command using a temporary docker-compose cluster, removing all containers \
and associated volumes after command completion (regardless of success or \
failure.)
Generally, this would be used with the ``run`` command to provide a clean room \
testing environment.
"""
if not sys.argv[1:]:
sys.stderr.write(usage)
sys.exit(1)
project = uuid.uuid1().hex
sys.stderr.write('Setting up ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'build'])
try:
subprocess.check_call(['docker-compose', '-p', project] + sys.argv[1:])
except subprocess.CalledProcessError as error:
raise SystemExit(error.returncode)
finally:
sys.stderr.write('\nCleaning up containers for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker-compose', '-p', project, 'stop'])
subprocess.check_call(['docker-compose', '-p', project, 'rm', '-f', '-v'])
sys.stderr.write('\nCleaning up images for ephemeral cluster ({0})...\n'.format(project))
subprocess.check_call(['docker', 'rmi'] + list(get_images_for_project(project)))
|
3e2baaed603ef9d904926049414bf51b48898776 | trex/serializers.py | trex/serializers.py | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
class TagDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("id", "project", "name", "description", "created")
| Add a TagDetailSerializer for returning more details of a Tag then id and name | Add a TagDetailSerializer for returning more details of a Tag then id and name
| Python | mit | bjoernricks/trex,bjoernricks/trex | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
Add a TagDetailSerializer for returning more details of a Tag then id and name | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
class TagDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("id", "project", "name", "description", "created")
| <commit_before># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
<commit_msg>Add a TagDetailSerializer for returning more details of a Tag then id and name<commit_after> | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
class TagDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("id", "project", "name", "description", "created")
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
Add a TagDetailSerializer for returning more details of a Tag then id and name# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
class TagDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("id", "project", "name", "description", "created")
| <commit_before># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
<commit_msg>Add a TagDetailSerializer for returning more details of a Tag then id and name<commit_after># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry, Tag
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("id", "name", "description", "active", "created", "entries")
class EntryTagsSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("url", "id", "name")
class EntryProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "id", "name")
class EntryDetailSerializer(HyperlinkedModelSerializer):
tags = EntryTagsSerializer(many=True)
project = EntryProjectSerializer()
class Meta:
model = Entry
fields = ("url", "id", "date", "duration", "description", "state",
"user_abbr", "user", "created", "project", "tags")
class TagDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Tag
fields = ("id", "project", "name", "description", "created")
|
1c16c8e98845550b19f6c75db5253805f656c636 | http_requests.py | http_requests.py | import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
request = self.view.substr(self.view.line(self.view.sel()[0]))
response = eval(request)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_request_variables.py') as f:
exec(f.read(), globals())
| import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
selections = self.get_selections()
for s in selections:
response = eval(s)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_requests_variables.py') as f:
exec(f.read(), globals())
def get_selections(self):
view = self.view
selections = []
for region in view.sel():
if not region.empty():
selections.append( view.substr(region) )
else:
selections.append( view.substr(view.line(region)) )
return selections
| Allow multiple requests to be made at once by iterating over selections | Allow multiple requests to be made at once by iterating over selections
| Python | mit | kylebebak/Requester,kylebebak/Requester | import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
request = self.view.substr(self.view.line(self.view.sel()[0]))
response = eval(request)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_request_variables.py') as f:
exec(f.read(), globals())
Allow multiple requests to be made at once by iterating over selections | import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
selections = self.get_selections()
for s in selections:
response = eval(s)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_requests_variables.py') as f:
exec(f.read(), globals())
def get_selections(self):
view = self.view
selections = []
for region in view.sel():
if not region.empty():
selections.append( view.substr(region) )
else:
selections.append( view.substr(view.line(region)) )
return selections
| <commit_before>import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
request = self.view.substr(self.view.line(self.view.sel()[0]))
response = eval(request)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_request_variables.py') as f:
exec(f.read(), globals())
<commit_msg>Allow multiple requests to be made at once by iterating over selections<commit_after> | import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
selections = self.get_selections()
for s in selections:
response = eval(s)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_requests_variables.py') as f:
exec(f.read(), globals())
def get_selections(self):
view = self.view
selections = []
for region in view.sel():
if not region.empty():
selections.append( view.substr(region) )
else:
selections.append( view.substr(view.line(region)) )
return selections
| import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
request = self.view.substr(self.view.line(self.view.sel()[0]))
response = eval(request)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_request_variables.py') as f:
exec(f.read(), globals())
Allow multiple requests to be made at once by iterating over selectionsimport sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
selections = self.get_selections()
for s in selections:
response = eval(s)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_requests_variables.py') as f:
exec(f.read(), globals())
def get_selections(self):
view = self.view
selections = []
for region in view.sel():
if not region.empty():
selections.append( view.substr(region) )
else:
selections.append( view.substr(view.line(region)) )
return selections
| <commit_before>import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
request = self.view.substr(self.view.line(self.view.sel()[0]))
response = eval(request)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_request_variables.py') as f:
exec(f.read(), globals())
<commit_msg>Allow multiple requests to be made at once by iterating over selections<commit_after>import sublime, sublime_plugin
import requests
from requests import delete, get, head, options, patch, post, put
class RequestCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.import_variables()
selections = self.get_selections()
for s in selections:
response = eval(s)
print(response.text)
def import_variables(self):
with open('/Users/kylebebak/GoogleDrive/Code/Config/ST/Packages/http_requests/_requests_variables.py') as f:
exec(f.read(), globals())
def get_selections(self):
view = self.view
selections = []
for region in view.sel():
if not region.empty():
selections.append( view.substr(region) )
else:
selections.append( view.substr(view.line(region)) )
return selections
|
8d99b27125af58aacbb9556c68774bdaf27fdda5 | tests/helpers.py | tests/helpers.py |
import asana
import requests
import responses
import unittest
import json
from six import next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
# From https://github.com/dropbox/responses/issues/31#issuecomment-63165210
from inspect import getmembers, isfunction, ismethod
def decallmethods(decorator, prefix='test_'):
def dectheclass(cls):
for name, m in getmembers(cls, predicate=lambda x: isfunction(x) or ismethod(x)):
if name.startswith(prefix):
setattr(cls, name, decorator(m))
return cls
return dectheclass
# TestCase subclass that automatically decorates test methods with responses.activate and sets up a client instance
class ClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
decallmethods(responses.activate)(cls)
def setUp(self):
self.client = asana.Client(
base_url='http://app',
poll_interval=0, # no delay when polling to speed up tests
iterator_type=None, # disable iterator and limit to match existing tests for now
)
| import json
import unittest
import asana
import requests
import responses
from six import add_metaclass, next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
def create_decorating_metaclass(decorators, prefix='test_'):
class DecoratingMethodsMetaclass(type):
def __new__(cls, name, bases, namespace):
namespace_items = tuple(namespace.items())
for key, val in namespace_items:
if key.startswith(prefix) and callable(val):
for dec in decorators:
val = dec(val)
namespace[key] = val
return type.__new__(cls, name, bases, dict(namespace))
return DecoratingMethodsMetaclass
# TestCase subclass that automatically decorates test methods with
# responses.activate and sets up a client instance
@add_metaclass(create_decorating_metaclass((responses.activate,)))
class ClientTestCase(unittest.TestCase):
def setUp(self):
self.client = asana.Client(
base_url='http://app',
# no delay when polling to speed up tests
poll_interval=0,
# disable iterator and limit to match existing tests for now
iterator_type=None,
)
| Fix broken unittests in Python 2.7. | Fix broken unittests in Python 2.7.
Originally all test classes were decorated by going over the class via
inspection - however, this doesn't work in Python 2.7 because the
methods that are returned from getmembers are all unbound (in Python 3+,
this is fixed as all methods are just function objects).
Now this uses a metaclass with six to work in both Python 2 and 3.
| Python | mit | asana/python-asana,Asana/python-asana,asana/python-asana |
import asana
import requests
import responses
import unittest
import json
from six import next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
# From https://github.com/dropbox/responses/issues/31#issuecomment-63165210
from inspect import getmembers, isfunction, ismethod
def decallmethods(decorator, prefix='test_'):
def dectheclass(cls):
for name, m in getmembers(cls, predicate=lambda x: isfunction(x) or ismethod(x)):
if name.startswith(prefix):
setattr(cls, name, decorator(m))
return cls
return dectheclass
# TestCase subclass that automatically decorates test methods with responses.activate and sets up a client instance
class ClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
decallmethods(responses.activate)(cls)
def setUp(self):
self.client = asana.Client(
base_url='http://app',
poll_interval=0, # no delay when polling to speed up tests
iterator_type=None, # disable iterator and limit to match existing tests for now
)
Fix broken unittests in Python 2.7.
Originally all test classes were decorated by going over the class via
inspection - however, this doesn't work in Python 2.7 because the
methods that are returned from getmembers are all unbound (in Python 3+,
this is fixed as all methods are just function objects).
Now this uses a metaclass with six to work in both Python 2 and 3. | import json
import unittest
import asana
import requests
import responses
from six import add_metaclass, next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
def create_decorating_metaclass(decorators, prefix='test_'):
class DecoratingMethodsMetaclass(type):
def __new__(cls, name, bases, namespace):
namespace_items = tuple(namespace.items())
for key, val in namespace_items:
if key.startswith(prefix) and callable(val):
for dec in decorators:
val = dec(val)
namespace[key] = val
return type.__new__(cls, name, bases, dict(namespace))
return DecoratingMethodsMetaclass
# TestCase subclass that automatically decorates test methods with
# responses.activate and sets up a client instance
@add_metaclass(create_decorating_metaclass((responses.activate,)))
class ClientTestCase(unittest.TestCase):
def setUp(self):
self.client = asana.Client(
base_url='http://app',
# no delay when polling to speed up tests
poll_interval=0,
# disable iterator and limit to match existing tests for now
iterator_type=None,
)
| <commit_before>
import asana
import requests
import responses
import unittest
import json
from six import next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
# From https://github.com/dropbox/responses/issues/31#issuecomment-63165210
from inspect import getmembers, isfunction, ismethod
def decallmethods(decorator, prefix='test_'):
def dectheclass(cls):
for name, m in getmembers(cls, predicate=lambda x: isfunction(x) or ismethod(x)):
if name.startswith(prefix):
setattr(cls, name, decorator(m))
return cls
return dectheclass
# TestCase subclass that automatically decorates test methods with responses.activate and sets up a client instance
class ClientTestCase(unittest.TestCase):
    """Base class for API tests: wraps every ``test_`` method in
    ``responses.activate`` and provides ``self.client``."""

    @classmethod
    def setUpClass(cls):
        # Decorate the test methods in place before any test runs.
        # NOTE(review): getmembers() returns unbound methods on Python 2.7,
        # which breaks this approach there -- confirm target interpreter.
        decallmethods(responses.activate)(cls)

    def setUp(self):
        """Create a fresh asana client pointed at a fake base URL."""
        self.client = asana.Client(
            base_url='http://app',
            poll_interval=0,  # no delay when polling to speed up tests
            iterator_type=None,  # disable iterator and limit to match existing tests for now
        )
<commit_msg>Fix broken unittests in Python 2.7.
Originally all test classes were decorated by going over the class via
inspection - however, this doesn't work in Python 2.7 because the
methods that are returned from getmembers are all unbound (in Python 3+,
this is fixed as all methods are just function objects).
Now this uses a metaclass with six to work in both Python 2 and 3.<commit_after> | import json
import unittest
import asana
import requests
import responses
from six import add_metaclass, next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
def create_decorating_metaclass(decorators, prefix='test_'):
class DecoratingMethodsMetaclass(type):
def __new__(cls, name, bases, namespace):
namespace_items = tuple(namespace.items())
for key, val in namespace_items:
if key.startswith(prefix) and callable(val):
for dec in decorators:
val = dec(val)
namespace[key] = val
return type.__new__(cls, name, bases, dict(namespace))
return DecoratingMethodsMetaclass
# TestCase subclass that automatically decorates test methods with
# responses.activate and sets up a client instance
@add_metaclass(create_decorating_metaclass((responses.activate,)))
class ClientTestCase(unittest.TestCase):
def setUp(self):
self.client = asana.Client(
base_url='http://app',
# no delay when polling to speed up tests
poll_interval=0,
# disable iterator and limit to match existing tests for now
iterator_type=None,
)
|
import asana
import requests
import responses
import unittest
import json
from six import next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
# From https://github.com/dropbox/responses/issues/31#issuecomment-63165210
from inspect import getmembers, isfunction, ismethod
def decallmethods(decorator, prefix='test_'):
def dectheclass(cls):
for name, m in getmembers(cls, predicate=lambda x: isfunction(x) or ismethod(x)):
if name.startswith(prefix):
setattr(cls, name, decorator(m))
return cls
return dectheclass
# TestCase subclass that automatically decorates test methods with responses.activate and sets up a client instance
class ClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
decallmethods(responses.activate)(cls)
def setUp(self):
self.client = asana.Client(
base_url='http://app',
poll_interval=0, # no delay when polling to speed up tests
iterator_type=None, # disable iterator and limit to match existing tests for now
)
Fix broken unittests in Python 2.7.
Originally all test classes were decorated by going over the class via
inspection - however, this doesn't work in Python 2.7 because the
methods that are returned from getmembers are all unbound (in Python 3+,
this is fixed as all methods are just function objects).
Now this uses a metaclass with six to work in both Python 2 and 3.import json
import unittest
import asana
import requests
import responses
from six import add_metaclass, next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
def create_decorating_metaclass(decorators, prefix='test_'):
class DecoratingMethodsMetaclass(type):
def __new__(cls, name, bases, namespace):
namespace_items = tuple(namespace.items())
for key, val in namespace_items:
if key.startswith(prefix) and callable(val):
for dec in decorators:
val = dec(val)
namespace[key] = val
return type.__new__(cls, name, bases, dict(namespace))
return DecoratingMethodsMetaclass
# TestCase subclass that automatically decorates test methods with
# responses.activate and sets up a client instance
@add_metaclass(create_decorating_metaclass((responses.activate,)))
class ClientTestCase(unittest.TestCase):
def setUp(self):
self.client = asana.Client(
base_url='http://app',
# no delay when polling to speed up tests
poll_interval=0,
# disable iterator and limit to match existing tests for now
iterator_type=None,
)
| <commit_before>
import asana
import requests
import responses
import unittest
import json
from six import next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
# From https://github.com/dropbox/responses/issues/31#issuecomment-63165210
from inspect import getmembers, isfunction, ismethod
def decallmethods(decorator, prefix='test_'):
def dectheclass(cls):
for name, m in getmembers(cls, predicate=lambda x: isfunction(x) or ismethod(x)):
if name.startswith(prefix):
setattr(cls, name, decorator(m))
return cls
return dectheclass
# TestCase subclass that automatically decorates test methods with responses.activate and sets up a client instance
class ClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
decallmethods(responses.activate)(cls)
def setUp(self):
self.client = asana.Client(
base_url='http://app',
poll_interval=0, # no delay when polling to speed up tests
iterator_type=None, # disable iterator and limit to match existing tests for now
)
<commit_msg>Fix broken unittests in Python 2.7.
Originally all test classes were decorated by going over the class via
inspection - however, this doesn't work in Python 2.7 because the
methods that are returned from getmembers are all unbound (in Python 3+,
this is fixed as all methods are just function objects).
Now this uses a metaclass with six to work in both Python 2 and 3.<commit_after>import json
import unittest
import asana
import requests
import responses
from six import add_metaclass, next
from responses import GET, PUT, POST, DELETE
# Define JSON primitives so we can just copy in JSON:
false = False
true = True
null = None
def create_decorating_metaclass(decorators, prefix='test_'):
class DecoratingMethodsMetaclass(type):
def __new__(cls, name, bases, namespace):
namespace_items = tuple(namespace.items())
for key, val in namespace_items:
if key.startswith(prefix) and callable(val):
for dec in decorators:
val = dec(val)
namespace[key] = val
return type.__new__(cls, name, bases, dict(namespace))
return DecoratingMethodsMetaclass
# TestCase subclass that automatically decorates test methods with
# responses.activate and sets up a client instance
@add_metaclass(create_decorating_metaclass((responses.activate,)))
class ClientTestCase(unittest.TestCase):
def setUp(self):
self.client = asana.Client(
base_url='http://app',
# no delay when polling to speed up tests
poll_interval=0,
# disable iterator and limit to match existing tests for now
iterator_type=None,
)
|
53234eb1ab0bafe49b8e198336d7958fed3e3f61 | awx/main/managers.py | awx/main/managers.py | # Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
    """A custom manager class for the Instance model.

    Provides "table-level" methods including getting the currently active
    instance or role.
    """

    def me(self):
        """Return the currently active instance.

        Looks the record up by this machine's UUID
        (``settings.SYSTEM_UUID``); presumably raises the model's
        ``DoesNotExist`` if no such row exists -- confirm callers expect
        that.
        """
        return self.get(uuid=settings.SYSTEM_UUID)

    def my_role(self):
        """Return the role of the currently active instance, as a string
        ('primary' or 'secondary').
        """
        if self.me().primary:
            return 'primary'
        return 'secondary'

    def primary(self):
        """Return the primary instance."""
        return self.get(primary=True)
| # Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
    """A custom manager class for the Instance model.

    Provides "table-level" methods including getting the currently active
    instance or role.
    """

    @staticmethod
    def _is_test_run():
        """Return True when running under ``manage.py test``.

        The argv sniff was previously duplicated in three methods; keep it
        in one place so the condition cannot drift.
        """
        return len(sys.argv) >= 2 and sys.argv[1] == 'test'

    def _stub_instance(self):
        """Return an unsaved stand-in record used during unit tests."""
        return self.model(id=1, primary=True,
                          uuid='00000000-0000-0000-0000-000000000000')

    def me(self):
        """Return the currently active instance."""
        # If we are running unit tests, return a stub record.
        if self._is_test_run():
            return self._stub_instance()
        # Return the appropriate record from the database.
        return self.get(uuid=settings.SYSTEM_UUID)

    def my_role(self):
        """Return the role of the currently active instance, as a string
        ('primary' or 'secondary').
        """
        # If we are running unit tests, we are primary, because reasons.
        if self._is_test_run():
            return 'primary'
        # Check if this instance is primary; if so, return "primary",
        # otherwise "secondary".
        if self.me().primary:
            return 'primary'
        return 'secondary'

    def primary(self):
        """Return the primary instance."""
        # If we are running unit tests, return a stub record.
        if self._is_test_run():
            return self._stub_instance()
        # Return the appropriate record from the database.
        return self.get(primary=True)
| Return stub records in testing. | Return stub records in testing.
| Python | apache-2.0 | snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx | # Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
return self.get(primary=True)
Return stub records in testing. | # Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
# If we are running unit tests, we are primary, because reasons.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return 'primary'
# Check if this instance is primary; if so, return "primary", otherwise
# "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(primary=True)
| <commit_before># Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
return self.get(primary=True)
<commit_msg>Return stub records in testing.<commit_after> | # Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
# If we are running unit tests, we are primary, because reasons.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return 'primary'
# Check if this instance is primary; if so, return "primary", otherwise
# "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(primary=True)
| # Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
return self.get(primary=True)
Return stub records in testing.# Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
# If we are running unit tests, we are primary, because reasons.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return 'primary'
# Check if this instance is primary; if so, return "primary", otherwise
# "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(primary=True)
| <commit_before># Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
return self.get(primary=True)
<commit_msg>Return stub records in testing.<commit_after># Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved.
import sys
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(uuid=settings.SYSTEM_UUID)
def my_role(self):
"""Return the role of the currently active instance, as a string
('primary' or 'secondary').
"""
# If we are running unit tests, we are primary, because reasons.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return 'primary'
# Check if this instance is primary; if so, return "primary", otherwise
# "secondary".
if self.me().primary:
return 'primary'
return 'secondary'
def primary(self):
"""Return the primary instance."""
# If we are running unit tests, return a stub record.
if len(sys.argv) >= 2 and sys.argv[1] == 'test':
return self.model(id=1, primary=True,
uuid='00000000-0000-0000-0000-000000000000')
# Return the appropriate record from the database.
return self.get(primary=True)
|
a0791aeead7eb77ddf1553a3392f1302a1a5acbb | Instanssi/ext_blog/views.py | Instanssi/ext_blog/views.py | # -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
    """RSS feed of the ten most recent public blog entries for an event."""
    title = "Instanssi.org Blogi"
    link = "http://instanssi.org"
    description = "Instanssi-demopartyn uusimmat uutiset."

    def get_object(self, request, event_id):
        """Resolve the event this feed belongs to (404 if unknown)."""
        return get_object_or_404(Event, pk=event_id)

    def items(self, obj):
        """Return the newest public entries, tagged with the event URL."""
        entries = []
        for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('-date')[:10]:
            # Stash the event's front-page URL on the entry so item_link()
            # can build anchor links without another query.
            entry.event_url = obj.mainurl
            entries.append(entry)
        return entries

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.text

    def item_link(self, item):
        # Link into the event's own page when one is configured, otherwise
        # fall back to the main site.  (A leftover debug print that wrote
        # item.event_url to stdout on every render was removed here.)
        if item.event_url and len(item.event_url) > 0:
            return item.event_url + '#blog' + str(item.id)
        return "http://instanssi.org/#blog" + str(item.id)
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('date'):
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id) | Sort by date, remove post limit. | ext_blog: Sort by date, remove post limit.
| Python | mit | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org | # -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('-date')[:10]:
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id)ext_blog: Sort by date, remove post limit. | # -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('date'):
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id) | <commit_before># -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('-date')[:10]:
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id)<commit_msg>ext_blog: Sort by date, remove post limit.<commit_after> | # -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('date'):
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id) | # -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('-date')[:10]:
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id)ext_blog: Sort by date, remove post limit.# -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('date'):
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id) | <commit_before># -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('-date')[:10]:
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id)<commit_msg>ext_blog: Sort by date, remove post limit.<commit_after># -*- coding: utf-8 -*-
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.shortcuts import get_object_or_404
from Instanssi.ext_blog.models import BlogEntry
from Instanssi.kompomaatti.models import Event
class blog_feed(Feed):
title = "Instanssi.org Blogi"
link = "http://instanssi.org"
description = "Instanssi-demopartyn uusimmat uutiset."
def get_object(self, request, event_id):
return get_object_or_404(Event, pk=event_id)
def items(self, obj):
entries = []
for entry in BlogEntry.objects.filter(event=obj, public=True).order_by('date'):
entry.event_url = obj.mainurl
entries.append(entry)
return entries
def item_title(self, item):
return item.title
def item_description(self, item):
return item.text
def item_link(self, item):
print item.event_url
if item.event_url and len(item.event_url) > 0:
return item.event_url + '#blog'+str(item.id)
return "http://instanssi.org/#blog"+str(item.id) |
b860372a9e874e8bc06efc9711f7b58591300e81 | tohu/__init__.py | tohu/__init__.py | from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
# Public API: the union of everything re-exported from the v6 submodules,
# plus a few convenience names defined in this module.
__all__ = base.__all__ \
    + primitive_generators.__all__ \
    + derived_generators.__all__ \
    + generator_dispatch.__all__ \
    + custom_generator.__all__ \
    + ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']

# Version string comes from the generated _version module (presumably
# versioneer output -- confirm); drop the helper from the namespace after use.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

tohu_logger = logger  # alias
from platform import python_version
min_supported_python_version = '3.6'
if LooseVersion(python_version()) < LooseVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | Use LooseVersion in version check (to avoid errors for e.g. '3.7.2+') | Use LooseVersion in version check (to avoid errors for e.g. '3.7.2+')
| Python | mit | maxalbert/tohu | from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # aliasUse LooseVersion in version check (to avoid errors for e.g. '3.7.2+') | from distutils.version import LooseVersion
from platform import python_version
min_supported_python_version = '3.6'
if LooseVersion(python_version()) < LooseVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | <commit_before>from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias<commit_msg>Use LooseVersion in version check (to avoid errors for e.g. '3.7.2+')<commit_after> | from distutils.version import LooseVersion
from platform import python_version
min_supported_python_version = '3.6'
if LooseVersion(python_version()) < LooseVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # aliasUse LooseVersion in version check (to avoid errors for e.g. '3.7.2+')from distutils.version import LooseVersion
from platform import python_version
min_supported_python_version = '3.6'
if LooseVersion(python_version()) < LooseVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias | <commit_before>from distutils.version import StrictVersion
from platform import python_version
min_supported_python_version = '3.6'
if StrictVersion(python_version()) < StrictVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias<commit_msg>Use LooseVersion in version check (to avoid errors for e.g. '3.7.2+')<commit_after>from distutils.version import LooseVersion
from platform import python_version
min_supported_python_version = '3.6'
if LooseVersion(python_version()) < LooseVersion(min_supported_python_version):
error_msg = (
"Tohu requires Python {min_supported_python_version} or greater to run "
"(currently running under Python {python_version()})"
)
raise RuntimeError(error_msg)
from . import v6
from .v6.base import *
from .v6.primitive_generators import *
from .v6.derived_generators import *
from .v6.generator_dispatch import *
from .v6.custom_generator import CustomGenerator
from .v6.logging import logger
from .v6.utils import print_generated_sequence, print_tohu_version
from .v6 import base
from .v6 import primitive_generators
from .v6 import derived_generators
from .v6 import generator_dispatch
from .v6 import custom_generator
from .v6 import set_special_methods
__all__ = base.__all__ \
+ primitive_generators.__all__ \
+ derived_generators.__all__ \
+ generator_dispatch.__all__ \
+ custom_generator.__all__ \
+ ['tohu_logger', 'print_generated_sequence', 'print_tohu_version']
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
tohu_logger = logger # alias |
715dcb62966b5c80544ed9eee79a6c69d3b9d927 | blog/posts/models.py | blog/posts/models.py | from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
display_title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
| from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
| Remove display_title field from Post model. | Remove display_title field from Post model.
It wasn't being used anyway.
| Python | mit | Lukasa/minimalog | from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
display_title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
Remove display_title field from Post model.
It wasn't being used anyway. | from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
| <commit_before>from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
display_title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
<commit_msg>Remove display_title field from Post model.
It wasn't being used anyway.<commit_after> | from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
| from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
display_title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
Remove display_title field from Post model.
It wasn't being used anyway.from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
| <commit_before>from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
display_title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
<commit_msg>Remove display_title field from Post model.
It wasn't being used anyway.<commit_after>from django.db import models
class Post(models.Model):
body = models.TextField()
title = models.CharField(max_length=50)
publication_date = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Comment(models.Model):
text = models.TextField()
author = models.CharField(max_length=100)
date = models.DateTimeField(auto_now_add=True)
post = models.ForeignKey(Post)
def __unicode__(self):
return self.author + u"'s comment on " + self.post.__unicode__()
|
d8b4acd0617dc93646e177ac56b0205be1b7ff88 | seaweb_project/seaweb_project/urls.py | seaweb_project/seaweb_project/urls.py | from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
| from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
| Disable indexing via robots.txt url. | Disable indexing via robots.txt url. | Python | mit | grollins/sea-web-django | from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
Disable indexing via robots.txt url. | from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
| <commit_before>from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
<commit_msg>Disable indexing via robots.txt url.<commit_after> | from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
| from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
Disable indexing via robots.txt url.from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
| <commit_before>from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
<commit_msg>Disable indexing via robots.txt url.<commit_after>from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
|
88a83ebcfe5a3841db4cf75985a2365df87395f8 | tests/test_create_elb.py | tests/test_create_elb.py | """Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
| """Test ELB creation functions."""
from foremast.elb.splay_health import splay_health
def test_splay():
"""Splay should split Health Checks properly."""
health = splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
| Update splay_health() test to new module | tests: Update splay_health() test to new module
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
tests: Update splay_health() test to new module | """Test ELB creation functions."""
from foremast.elb.splay_health import splay_health
def test_splay():
"""Splay should split Health Checks properly."""
health = splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
| <commit_before>"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
<commit_msg>tests: Update splay_health() test to new module<commit_after> | """Test ELB creation functions."""
from foremast.elb.splay_health import splay_health
def test_splay():
"""Splay should split Health Checks properly."""
health = splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
| """Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
tests: Update splay_health() test to new module"""Test ELB creation functions."""
from foremast.elb.splay_health import splay_health
def test_splay():
"""Splay should split Health Checks properly."""
health = splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
| <commit_before>"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
<commit_msg>tests: Update splay_health() test to new module<commit_after>"""Test ELB creation functions."""
from foremast.elb.splay_health import splay_health
def test_splay():
    """Splay should split Health Checks properly."""
    # (check string, expected path, port, proto, target)
    cases = [
        ('HTTP:80/test', '/test', '80', 'HTTP', 'HTTP:80/test'),
        ('TCP:8000/test', '', '8000', 'TCP', 'TCP:8000'),
        ('HTTPS:8000/test', '/test', '8000', 'HTTPS', 'HTTPS:8000/test'),
        ('HTTPS:80', '/healthcheck', '80', 'HTTPS', 'HTTPS:80/healthcheck'),
    ]
    for check, path, port, proto, target in cases:
        health = splay_health(check)
        assert health.path == path
        assert health.port == port
        assert health.proto == proto
        assert health.target == target
|
c9e11c04bd5981f810b47a659f0777d1976cb01f | vaux/storage/metadata.py | vaux/storage/metadata.py | import rethinkdb as r
class MetaEngine(object):
    """Thin wrapper around one RethinkDB table of metadata documents."""

    def __init__(self, hostname, port, db, table):
        """Connect to *db* on *hostname*:*port* and remember *table*."""
        self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
        self.table = table

    def _tbl(self):
        # Common starting point for every query against the wrapped table.
        return r.table(self.table)

    def get_all(self):
        """Yield every document stored in the table."""
        for document in self._tbl().run(self.rdb):
            yield document

    def put(self, data):
        """Insert *data* as a new document."""
        self._tbl().insert(data).run(self.rdb)

    def delete(self, filter_data):
        """Remove every document matching *filter_data*."""
        self._tbl().filter(filter_data).delete().run(self.rdb)

    def get(self, filter_data):
        """Return the first document matching *filter_data*, or None."""
        matches = list(self._tbl().filter(filter_data).run(self.rdb))
        return matches[0] if matches else None

    def exists(self, filter_data):
        """Report whether at least one document matches *filter_data*."""
        return len(list(self._tbl().filter(filter_data).run(self.rdb))) > 0

    def search(self, ffunc):
        """Yield every document accepted by the filter function *ffunc*."""
        for document in self._tbl().filter(ffunc).run(self.rdb):
            yield document
| import rethinkdb as r
class MetaEngine(object):
    """Thin wrapper around one RethinkDB table of metadata documents.

    On construction the database and table are created if missing, so the
    engine can be pointed at a fresh server without manual setup.
    """

    def __init__(self, hostname, port, db, table):
        """Connect to *hostname*:*port*, ensuring *db* and *table* exist."""
        # First connection has no default database so we can create it.
        self.rdb = r.connect(host=hostname, port=port, timeout=20)
        # NOTE(review): the driver usually spells this r.db_create(db).run(conn),
        # not as a Connection method — confirm against the installed driver.
        try:
            self.rdb.db_create(db).run()
        except Exception:
            # Best-effort: the database most likely already exists.
            pass
        self.rdb.close()
        # Reconnect with the database selected as the connection default.
        self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
        try:
            self.rdb.table_create(table).run()
        except Exception:
            # Best-effort: the table most likely already exists.
            pass
        self.table = table

    def get_all(self):
        """Yield every document stored in the table."""
        for item in r.table(self.table).run(self.rdb):
            yield item

    def put(self, data):
        """Insert *data* as a new document."""
        r.table(self.table).insert(data).run(self.rdb)

    def delete(self, filter_data):
        """Remove every document matching *filter_data*."""
        r.table(self.table).filter(filter_data).delete().run(self.rdb)

    def get(self, filter_data):
        """Return the first document matching *filter_data*, or None."""
        result = list(r.table(self.table).filter(filter_data).run(self.rdb))
        if len(result) > 0:
            return result[0]
        return None

    def exists(self, filter_data):
        """Report whether at least one document matches *filter_data*."""
        return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0

    def search(self, ffunc):
        """Yield every document accepted by the filter function *ffunc*."""
        for item in r.table(self.table).filter(ffunc).run(self.rdb):
            yield item
| Create the database and table if they don't exist | Create the database and table if they don't exist
I think... :/
| Python | mit | VauxIo/core | import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
Create the database and table if they don't exist
I think... :/ | import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, timeout=20)
try:
self.rdb.db_create(db).run()
except Exception, e:
pass
self.rdb.close()
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
try:
self.rdb.table_create(table).run()
except Exception, e:
pass
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
| <commit_before>import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
<commit_msg>Create the database and table if they don't exist
I think... :/<commit_after> | import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, timeout=20)
try:
self.rdb.db_create(db).run()
except Exception, e:
pass
self.rdb.close()
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
try:
self.rdb.table_create(table).run()
except Exception, e:
pass
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
| import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
Create the database and table if they don't exist
I think... :/import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, timeout=20)
try:
self.rdb.db_create(db).run()
except Exception, e:
pass
self.rdb.close()
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
try:
self.rdb.table_create(table).run()
except Exception, e:
pass
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
| <commit_before>import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
<commit_msg>Create the database and table if they don't exist
I think... :/<commit_after>import rethinkdb as r
class MetaEngine(object):
def __init__(self, hostname, port, db, table):
self.rdb = r.connect(host=hostname, port=port, timeout=20)
try:
self.rdb.db_create(db).run()
except Exception, e:
pass
self.rdb.close()
self.rdb = r.connect(host=hostname, port=port, db=db, timeout=20)
try:
self.rdb.table_create(table).run()
except Exception, e:
pass
self.table = table
def get_all(self):
for item in r.table(self.table).run(self.rdb):
yield item
def put(self, data):
r.table(self.table).insert(data).run(self.rdb)
def delete(self, filter_data):
r.table(self.table).filter(filter_data).delete().run(self.rdb)
def get(self, filter_data):
result = list(r.table(self.table).filter(filter_data).run(self.rdb))
if len(result) > 0:
return result[0]
return None
def exists(self, filter_data):
return len(list(r.table(self.table).filter(filter_data).run(self.rdb))) > 0
def search(self, ffunc):
for item in r.table(self.table).filter(ffunc).run(self.rdb):
yield item
|
82aca6b352de3133f1fba454d82e1a20f3da2436 | src/auditlog_tests/test_settings.py | src/auditlog_tests/test_settings.py | """
Settings file for the Auditlog test suite.
"""
# Dummy secret key — fine for the test suite, never for production use.
SECRET_KEY = 'test'
# auth/contenttypes are required by auditlog's foreign keys to User and
# ContentType; auditlog_tests supplies the test-only models.
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'auditlog',
    'auditlog_tests',
]
# AuditlogMiddleware presumably ties the request's user to log entries —
# confirm in auditlog.middleware.
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'auditlog.middleware.AuditlogMiddleware',
)
# File-backed SQLite keeps the test run self-contained.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'auditlog_tests.db',
    }
}
| """
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
ROOT_URLCONF = []
| Make Django 1.9+ accept the settings file | Make Django 1.9+ accept the settings file
| Python | mit | Zmeylol/auditlog,jjkester/django-auditlog,kbussell/django-auditlog,chris-griffin/django-auditlog | """
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
Make Django 1.9+ accept the settings file | """
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
ROOT_URLCONF = []
| <commit_before>"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
<commit_msg>Make Django 1.9+ accept the settings file<commit_after> | """
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
ROOT_URLCONF = []
| """
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
Make Django 1.9+ accept the settings file"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
ROOT_URLCONF = []
| <commit_before>"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
<commit_msg>Make Django 1.9+ accept the settings file<commit_after>"""
Settings file for the Auditlog test suite.
"""
SECRET_KEY = 'test'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'auditlog',
'auditlog_tests',
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'auditlog.middleware.AuditlogMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'auditlog_tests.db',
}
}
ROOT_URLCONF = []
|
3d2a67ab4b46bf389376544fb21341163b57e8fa | mptt/signals.py | mptt/signals.py | import django.dispatch
# Behaves like Django's normal pre-/post_save signals, with the added
# arguments ``target`` and ``position`` that match those of ``move_to``.
# When the signal is sent from ``save``, ``position`` is not passed.
node_moved = django.dispatch.Signal(providing_args=[
    'instance',
    'target',
    'position'
])
| from django.db.models.signals import ModelSignal as Signal
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = Signal(providing_args=[
'instance',
'target',
'position'
])
| Use ModelSignal over plain Signal | Use ModelSignal over plain Signal | Python | mit | matthiask/django-mptt,matthiask/django-mptt,matthiask/django-mptt,matthiask/django-mptt | import django.dispatch
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = django.dispatch.Signal(providing_args=[
'instance',
'target',
'position'
])
Use ModelSignal over plain Signal | from django.db.models.signals import ModelSignal as Signal
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = Signal(providing_args=[
'instance',
'target',
'position'
])
| <commit_before>import django.dispatch
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = django.dispatch.Signal(providing_args=[
'instance',
'target',
'position'
])
<commit_msg>Use ModelSignal over plain Signal<commit_after> | from django.db.models.signals import ModelSignal as Signal
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = Signal(providing_args=[
'instance',
'target',
'position'
])
| import django.dispatch
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = django.dispatch.Signal(providing_args=[
'instance',
'target',
'position'
])
Use ModelSignal over plain Signalfrom django.db.models.signals import ModelSignal as Signal
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = Signal(providing_args=[
'instance',
'target',
'position'
])
| <commit_before>import django.dispatch
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = django.dispatch.Signal(providing_args=[
'instance',
'target',
'position'
])
<commit_msg>Use ModelSignal over plain Signal<commit_after>from django.db.models.signals import ModelSignal as Signal
# Behaves like Djangos normal pre-/post_save signals signals with the
# added arguments ``target`` and ``position`` that matches those of
# ``move_to``.
# If the signal is called from ``save`` it'll not be pass position.
node_moved = Signal(providing_args=[
'instance',
'target',
'position'
])
|
d054180fe1ff5ff7f4a0bc5b62f8dfcbb15a9c09 | winthrop/people/admin.py | winthrop/people/admin.py | from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
admin.site.register(Person)
admin.site.register(Residence)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
| from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
class ResidenceInline(admin.TabularInline):
    '''Tabular inline so Residence rows can be edited on the Person page.'''
    model = Residence
class PersonAdmin(admin.ModelAdmin):
    '''Admin for Person; residences are edited inline on the same page.'''
    inlines = [
        ResidenceInline
    ]
admin.site.register(Person, PersonAdmin)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
| Add residence as an inline to person/people | Add residence as an inline to person/people
| Python | apache-2.0 | Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django | from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
admin.site.register(Person)
admin.site.register(Residence)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
Add residence as an inline to person/people | from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
class ResidenceInline(admin.TabularInline):
'''Inline class for Residence'''
model = Residence
class PersonAdmin(admin.ModelAdmin):
inlines = [
ResidenceInline
]
admin.site.register(Person, PersonAdmin)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
| <commit_before>from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
admin.site.register(Person)
admin.site.register(Residence)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
<commit_msg>Add residence as an inline to person/people<commit_after> | from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
class ResidenceInline(admin.TabularInline):
'''Inline class for Residence'''
model = Residence
class PersonAdmin(admin.ModelAdmin):
inlines = [
ResidenceInline
]
admin.site.register(Person, PersonAdmin)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
| from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
admin.site.register(Person)
admin.site.register(Residence)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
Add residence as an inline to person/peoplefrom django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
class ResidenceInline(admin.TabularInline):
'''Inline class for Residence'''
model = Residence
class PersonAdmin(admin.ModelAdmin):
inlines = [
ResidenceInline
]
admin.site.register(Person, PersonAdmin)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
| <commit_before>from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
admin.site.register(Person)
admin.site.register(Residence)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
<commit_msg>Add residence as an inline to person/people<commit_after>from django.contrib import admin
from .models import Person, Residence, RelationshipType, Relationship
class ResidenceInline(admin.TabularInline):
'''Inline class for Residence'''
model = Residence
class PersonAdmin(admin.ModelAdmin):
inlines = [
ResidenceInline
]
admin.site.register(Person, PersonAdmin)
admin.site.register(RelationshipType)
admin.site.register(Relationship)
|
26bac2e3f01ed6b7b631f9dec8d2c3156e9ce01a | blog/blog/urls.py | blog/blog/urls.py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
    # All view names below are resolved relative to 'posts.views'.
    url(r'^$', 'home', name='blog_home'),
    # Dated post URLs, e.g. /blog/2013/05/my_title/.
    url(r'^blog/(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
        'post',
        name='blog_post'),
    url(r'^archive/$', 'archive', name='blog_archive'),
    url(r'^about/$', 'about', name='blog_about_me'),
    # url(r'^blog/', include('blog.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
| Remove '/blog/' from the post url. | Remove '/blog/' from the post url.
Was pretty unnecessary.
| Python | mit | Lukasa/minimalog | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^blog/(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
Remove '/blog/' from the post url.
Was pretty unnecessary. | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
| <commit_before>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^blog/(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Remove '/blog/' from the post url.
Was pretty unnecessary.<commit_after> | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^blog/(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
Remove '/blog/' from the post url.
Was pretty unnecessary.from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
| <commit_before>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^blog/(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Remove '/blog/' from the post url.
Was pretty unnecessary.<commit_after>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('posts.views',
# Examples:
url(r'^$', 'home', name='blog_home'),
url(r'^(?P<post_year>\d{4})/(?P<post_month>\d{2})/(?P<post_title>\w+)/$',
'post',
name='blog_post'),
url(r'^archive/$', 'archive', name='blog_archive'),
url(r'^about/$', 'about', name='blog_about_me'),
# url(r'^blog/', include('blog.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
|
fc6202425e0c855dc29980904949b60c0ac48bbf | preparation/tools/build_assets.py | preparation/tools/build_assets.py | from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
    """Regenerate the storage trunk backing *resource_name* from scratch."""
    resource = resource_by_name(resource_name)()
    trunk_name = resource_name.replace('Resource', '')
    with get_storage(trunk_name) as storage:
        storage.clear()
        for explanation in resource:
            entry = copy(explanation)
            # Run the modifier pipeline; a modifier may drop the entry by
            # returning None, which stops further processing of it.
            for modifier in resource.modifiers:
                if entry is None:
                    break
                entry = modifier(entry)
            if entry is not None:
                storage.add_entry(entry)
def rebuild_all():
    """Rebuild the storage trunk of every registered resource."""
    for registered_name in names_registered():
        rebuild_from_resource(registered_name)
| from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| Add start/finish debug info while generating | Add start/finish debug info while generating
| Python | mit | hatbot-team/hatbot_resources | from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
with get_storage(resource_name.replace('Resource', '')) as out_storage:
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
Add start/finish debug info while generating | from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| <commit_before>from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
with get_storage(resource_name.replace('Resource', '')) as out_storage:
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
<commit_msg>Add start/finish debug info while generating<commit_after> | from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
with get_storage(resource_name.replace('Resource', '')) as out_storage:
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
Add start/finish debug info while generatingfrom copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| <commit_before>from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
with get_storage(resource_name.replace('Resource', '')) as out_storage:
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
<commit_msg>Add start/finish debug info while generating<commit_after>from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
|
248b43bdfe0ad42772bf76f2e64b1624460ce494 | deployer/__init__.py | deployer/__init__.py | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.8'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.9'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| Prepare for next dev version | Prepare for next dev version | Python | mit | totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.8'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
Prepare for next dev version | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.9'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| <commit_before>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.8'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
<commit_msg>Prepare for next dev version<commit_after> | from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.9'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.8'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
Prepare for next dev versionfrom __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.9'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
| <commit_before>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.8'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
<commit_msg>Prepare for next dev version<commit_after>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.3.9'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
52fa13e103b77365acc9769f205d6b92af94b738 | lambda/lambda.py | lambda/lambda.py | from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
| from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, msg_get_header, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
del msg['DKIM-Signature']
print('Message from {}.'.format(msg_get_header(msg, 'from')))
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
del msg['Sender']
msg['Sender'] = cfg.address
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
| Remove any existing DKIM signature and set the Sender: header before sending. | Remove any existing DKIM signature and set the Sender: header before sending.
| Python | mit | ilg/LambdaMLM | from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
Remove any existing DKIM signature and set the Sender: header before sending. | from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, msg_get_header, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
del msg['DKIM-Signature']
print('Message from {}.'.format(msg_get_header(msg, 'from')))
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
del msg['Sender']
msg['Sender'] = cfg.address
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
| <commit_before>from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
<commit_msg>Remove any existing DKIM signature and set the Sender: header before sending.<commit_after> | from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, msg_get_header, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
del msg['DKIM-Signature']
print('Message from {}.'.format(msg_get_header(msg, 'from')))
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
del msg['Sender']
msg['Sender'] = cfg.address
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
| from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
Remove any existing DKIM signature and set the Sender: header before sending.from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, msg_get_header, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
del msg['DKIM-Signature']
print('Message from {}.'.format(msg_get_header(msg, 'from')))
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
del msg['Sender']
msg['Sender'] = cfg.address
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
| <commit_before>from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
<commit_msg>Remove any existing DKIM signature and set the Sender: header before sending.<commit_after>from __future__ import print_function
from ses import email_message_from_s3_bucket, event_msg_is_to_command, msg_get_header, recipient_destination_overlap
from cnc import handle_command
import boto3
ses = boto3.client('ses')
from config import email_bucket
from listcfg import ListConfiguration
def lambda_handler(event, context):
with email_message_from_s3_bucket(event, email_bucket) as msg:
# If it's a command, handle it as such.
command_address = event_msg_is_to_command(event, msg)
if command_address:
print('Message addressed to command ({}).'.format(command_address))
handle_command(command_address, msg)
return
del msg['DKIM-Signature']
print('Message from {}.'.format(msg_get_header(msg, 'from')))
# See if the message was sent to any known lists.
for addr in recipient_destination_overlap(event):
print('Looking for list {}...'.format(addr))
try:
cfg = ListConfiguration(addr)
except:
continue
print('Found list {}.'.format(addr))
del msg['Sender']
msg['Sender'] = cfg.address
for user, flags in cfg.config['users'].iteritems():
print('> Sending to user {}.'.format(user))
ses.send_raw_email(
Source=cfg.address,
Destinations=[ user, ],
RawMessage={ 'Data': msg.as_string(), },
)
|
40688413e59aaabd4a92dba4d2f402fb42fee143 | 1-multiples-of-3-and-5.py | 1-multiples-of-3-and-5.py | from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
| from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
| Add gen exp solution to 1 | Add gen exp solution to 1
| Python | mit | dawran6/project-euler | from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
Add gen exp solution to 1 | from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
| <commit_before>from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
<commit_msg>Add gen exp solution to 1<commit_after> | from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
| from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
Add gen exp solution to 1from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
| <commit_before>from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
<commit_msg>Add gen exp solution to 1<commit_after>from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
|
ac8e58e1430ca7418f64bb547e3513032d5b49e8 | tests/lexer_test.py | tests/lexer_test.py | from whitepy.lexerconstants import *
import whitepy.lexer as lexer
import unittest
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _valid_ws(self):
return self._get_lexer(" \t\n")
def test_get_int(self):
lexer = self._valid_ws()
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
def test_get_token(self):
lexer = self._valid_ws()
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._valid_ws()
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
| from nose.tools import *
from whitepy.lexerconstants import *
import unittest
import whitepy.lexer as lexer
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _sample_ws(self, ws_type):
ws_samples = {
'valid': " \t\n",
'invalid_int': " \t"
}
return self._get_lexer(ws_samples[ws_type])
def test_get_int(self):
lexer = self._sample_ws('valid')
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
@raises(lexer.IntError)
def test_invalid_int(self):
lexer = self._sample_ws('invalid_int')
lexer._get_int()
def test_get_token(self):
lexer = self._sample_ws('valid')
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._sample_ws('valid')
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
| Add test for invalid integer | Add test for invalid integer
A valid integer in Whitesapce ends with '\n', if an invalid integer is found the
code should raise a IntError exception.
As part of this, I have renamed `_valid_ws()` to `_sample_ws()`, which now takes
a argument for the type of whitespace needed.
| Python | apache-2.0 | yasn77/whitepy | from whitepy.lexerconstants import *
import whitepy.lexer as lexer
import unittest
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _valid_ws(self):
return self._get_lexer(" \t\n")
def test_get_int(self):
lexer = self._valid_ws()
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
def test_get_token(self):
lexer = self._valid_ws()
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._valid_ws()
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
Add test for invalid integer
A valid integer in Whitesapce ends with '\n', if an invalid integer is found the
code should raise a IntError exception.
As part of this, I have renamed `_valid_ws()` to `_sample_ws()`, which now takes
a argument for the type of whitespace needed. | from nose.tools import *
from whitepy.lexerconstants import *
import unittest
import whitepy.lexer as lexer
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _sample_ws(self, ws_type):
ws_samples = {
'valid': " \t\n",
'invalid_int': " \t"
}
return self._get_lexer(ws_samples[ws_type])
def test_get_int(self):
lexer = self._sample_ws('valid')
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
@raises(lexer.IntError)
def test_invalid_int(self):
lexer = self._sample_ws('invalid_int')
lexer._get_int()
def test_get_token(self):
lexer = self._sample_ws('valid')
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._sample_ws('valid')
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
| <commit_before>from whitepy.lexerconstants import *
import whitepy.lexer as lexer
import unittest
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _valid_ws(self):
return self._get_lexer(" \t\n")
def test_get_int(self):
lexer = self._valid_ws()
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
def test_get_token(self):
lexer = self._valid_ws()
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._valid_ws()
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
<commit_msg>Add test for invalid integer
A valid integer in Whitesapce ends with '\n', if an invalid integer is found the
code should raise a IntError exception.
As part of this, I have renamed `_valid_ws()` to `_sample_ws()`, which now takes
a argument for the type of whitespace needed.<commit_after> | from nose.tools import *
from whitepy.lexerconstants import *
import unittest
import whitepy.lexer as lexer
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _sample_ws(self, ws_type):
ws_samples = {
'valid': " \t\n",
'invalid_int': " \t"
}
return self._get_lexer(ws_samples[ws_type])
def test_get_int(self):
lexer = self._sample_ws('valid')
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
@raises(lexer.IntError)
def test_invalid_int(self):
lexer = self._sample_ws('invalid_int')
lexer._get_int()
def test_get_token(self):
lexer = self._sample_ws('valid')
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._sample_ws('valid')
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
| from whitepy.lexerconstants import *
import whitepy.lexer as lexer
import unittest
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _valid_ws(self):
return self._get_lexer(" \t\n")
def test_get_int(self):
lexer = self._valid_ws()
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
def test_get_token(self):
lexer = self._valid_ws()
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._valid_ws()
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
Add test for invalid integer
A valid integer in Whitesapce ends with '\n', if an invalid integer is found the
code should raise a IntError exception.
As part of this, I have renamed `_valid_ws()` to `_sample_ws()`, which now takes
a argument for the type of whitespace needed.from nose.tools import *
from whitepy.lexerconstants import *
import unittest
import whitepy.lexer as lexer
class TestLexer(unittest.TestCase):
def _get_lexer(self, line):
return lexer.Lexer(line=line)
def _sample_ws(self, ws_type):
ws_samples = {
'valid': " \t\n",
'invalid_int': " \t"
}
return self._get_lexer(ws_samples[ws_type])
def test_get_int(self):
lexer = self._sample_ws('valid')
r = lexer._get_int()
assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
@raises(lexer.IntError)
def test_invalid_int(self):
lexer = self._sample_ws('invalid_int')
lexer._get_int()
def test_get_token(self):
lexer = self._sample_ws('valid')
lexer.pos = 1
r = lexer._get_token(IMP_CONST)
assert r.get_type() == 'STACK_MANIPULATION'
def test_get_all_tokens(self):
lexer = self._sample_ws('valid')
lexer.get_all_tokens()
t = lexer.tokens
assert t[0].get_type() is 'STACK_MANIPULATION' and \
t[1].get_type() is 'PUSH' and t[2].get_type() is 'INT'
| <commit_before>from whitepy.lexerconstants import *
import whitepy.lexer as lexer
import unittest
class TestLexer(unittest.TestCase):
    """Unit tests for whitepy's whitespace Lexer."""
    def _get_lexer(self, line):
        return lexer.Lexer(line=line)
    def _valid_ws(self):
        # A complete, '\n'-terminated whitespace snippet pushing 1.
        return self._get_lexer("   \t\n")
    def test_get_int(self):
        lexer = self._valid_ws()
        r = lexer._get_int()
        assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
    def test_get_token(self):
        lexer = self._valid_ws()
        lexer.pos = 1
        r = lexer._get_token(IMP_CONST)
        assert r.get_type() == 'STACK_MANIPULATION'
    def test_get_all_tokens(self):
        lexer = self._valid_ws()
        lexer.get_all_tokens()
        t = lexer.tokens
        # Use == rather than 'is': identity checks against string literals
        # rely on CPython interning and raise SyntaxWarning on 3.8+.
        assert t[0].get_type() == 'STACK_MANIPULATION' and \
            t[1].get_type() == 'PUSH' and t[2].get_type() == 'INT'
<commit_msg>Add test for invalid integer
A valid integer in Whitespace ends with '\n'; if an invalid integer is found the
code should raise an IntError exception.
As part of this, I have renamed `_valid_ws()` to `_sample_ws()`, which now takes
an argument for the type of whitespace needed.<commit_after>from nose.tools import *
from whitepy.lexerconstants import *
import unittest
import whitepy.lexer as lexer
class TestLexer(unittest.TestCase):
    """Unit tests for whitepy's whitespace Lexer."""
    def _get_lexer(self, line):
        return lexer.Lexer(line=line)
    def _sample_ws(self, ws_type):
        # Canned whitespace sources: 'valid' is a complete, '\n'-terminated
        # integer push; 'invalid_int' lacks the terminating '\n'.
        ws_samples = {
            'valid': "   \t\n",
            'invalid_int': "   \t"
        }
        return self._get_lexer(ws_samples[ws_type])
    def test_get_int(self):
        lexer = self._sample_ws('valid')
        r = lexer._get_int()
        assert r.get_type() == 'INT' and r.get_value() == 'POSITIVE 001'
    @raises(lexer.IntError)
    def test_invalid_int(self):
        # An integer not terminated by '\n' must raise IntError.
        lexer = self._sample_ws('invalid_int')
        lexer._get_int()
    def test_get_token(self):
        lexer = self._sample_ws('valid')
        lexer.pos = 1
        r = lexer._get_token(IMP_CONST)
        assert r.get_type() == 'STACK_MANIPULATION'
    def test_get_all_tokens(self):
        lexer = self._sample_ws('valid')
        lexer.get_all_tokens()
        t = lexer.tokens
        # Use == rather than 'is': identity checks against string literals
        # rely on CPython interning and raise SyntaxWarning on 3.8+.
        assert t[0].get_type() == 'STACK_MANIPULATION' and \
            t[1].get_type() == 'PUSH' and t[2].get_type() == 'INT'
|
53878700a4da22e80114ef67a4aee340846abf91 | us_ignite/search/urls.py | us_ignite/search/urls.py | from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Anchor every pattern with ^...$ — unanchored regexes like r'apps/'
    # match the substring anywhere in the path, so they also fired for
    # unrelated URLs and could shadow later routes.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
| from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Each route is anchored (^...$) so it matches the exact path only.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
| Fix broad regex for the ``search`` URLs. | Fix broad regex for the ``search`` URLs.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Anchor every pattern with ^...$ — unanchored regexes like r'apps/'
    # match the substring anywhere in the path, so they also fired for
    # unrelated URLs and could shadow later routes.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
Fix broad regex for the ``search`` URLs. | from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Each route is anchored (^...$) so it matches the exact path only.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
| <commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Anchor every pattern with ^...$ — unanchored regexes like r'apps/'
    # match the substring anywhere in the path, so they also fired for
    # unrelated URLs and could shadow later routes.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
<commit_msg>Fix broad regex for the ``search`` URLs.<commit_after> | from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Each route is anchored (^...$) so it matches the exact path only.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
| from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Anchor every pattern with ^...$ — unanchored regexes like r'apps/'
    # match the substring anywhere in the path, so they also fired for
    # unrelated URLs and could shadow later routes.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
Fix broad regex for the ``search`` URLs.from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Each route is anchored (^...$) so it matches the exact path only.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
| <commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Anchor every pattern with ^...$ — unanchored regexes like r'apps/'
    # match the substring anywhere in the path, so they also fired for
    # unrelated URLs and could shadow later routes.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
<commit_msg>Fix broad regex for the ``search`` URLs.<commit_after>from django.conf.urls import patterns, url
urlpatterns = patterns(
    'us_ignite.search.views',
    # Each route is anchored (^...$) so it matches the exact path only.
    url(r'^apps/$', 'search_apps', name='search_apps'),
    url(r'^events/$', 'search_events', name='search_events'),
    url(r'^hubs/$', 'search_hubs', name='search_hubs'),
    url(r'^orgs/$', 'search_organizations', name='search_organizations'),
    url(r'^resources/$', 'search_resources', name='search_resources'),
)
|
1d8fc0e63e5527b6f65da4899b432dcdfa243557 | ava/text_to_speech/__init__.py | ava/text_to_speech/__init__.py | import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS."""
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "linux" or _platform == "linux2" or _platform == "darwin":
            # POSIX: stream the MP3 through a temporary file that is
            # removed automatically when the context manager exits.
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            # Other platforms (e.g. Windows): save to a named file,
            # play it, then delete it manually.
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            playsound(filename)
            os.remove(filename)
        # Tell the queue this work item is finished.
        self.queue_tts.task_done()
| import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS.

        On macOS the MP3 is streamed through an auto-deleted temporary
        file; elsewhere it is saved to disk, played (except on Linux,
        where playback is skipped so ava can run without TTS support),
        and removed afterwards.
        """
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "darwin":
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            # startswith covers both "linux" and "linux2"; replaces the
            # original empty `if linux: pass / else: play` branch.
            if not _platform.startswith("linux"):
                playsound(filename)
            os.remove(filename)
        self.queue_tts.task_done()
| Allow user to use ava without TTS on Linux | Allow user to use ava without TTS on Linux
| Python | mit | ava-project/AVA | import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS."""
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "linux" or _platform == "linux2" or _platform == "darwin":
            # POSIX: stream the MP3 through a temporary file that is
            # removed automatically when the context manager exits.
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            # Other platforms (e.g. Windows): save to a named file,
            # play it, then delete it manually.
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            playsound(filename)
            os.remove(filename)
        # Tell the queue this work item is finished.
        self.queue_tts.task_done()
Allow user to use ava without TTS on Linux | import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS.

        On macOS the MP3 is streamed through an auto-deleted temporary
        file; elsewhere it is saved to disk, played (except on Linux,
        where playback is skipped so ava can run without TTS support),
        and removed afterwards.
        """
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "darwin":
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            # startswith covers both "linux" and "linux2"; replaces the
            # original empty `if linux: pass / else: play` branch.
            if not _platform.startswith("linux"):
                playsound(filename)
            os.remove(filename)
        self.queue_tts.task_done()
| <commit_before>import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS."""
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "linux" or _platform == "linux2" or _platform == "darwin":
            # POSIX: stream the MP3 through a temporary file that is
            # removed automatically when the context manager exits.
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            # Other platforms (e.g. Windows): save to a named file,
            # play it, then delete it manually.
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            playsound(filename)
            os.remove(filename)
        # Tell the queue this work item is finished.
        self.queue_tts.task_done()
<commit_msg>Allow user to use ava without TTS on Linux<commit_after> | import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS.

        On macOS the MP3 is streamed through an auto-deleted temporary
        file; elsewhere it is saved to disk, played (except on Linux,
        where playback is skipped so ava can run without TTS support),
        and removed afterwards.
        """
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "darwin":
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            # startswith covers both "linux" and "linux2"; replaces the
            # original empty `if linux: pass / else: play` branch.
            if not _platform.startswith("linux"):
                playsound(filename)
            os.remove(filename)
        self.queue_tts.task_done()
| import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS."""
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "linux" or _platform == "linux2" or _platform == "darwin":
            # POSIX: stream the MP3 through a temporary file that is
            # removed automatically when the context manager exits.
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            # Other platforms (e.g. Windows): save to a named file,
            # play it, then delete it manually.
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            playsound(filename)
            os.remove(filename)
        # Tell the queue this work item is finished.
        self.queue_tts.task_done()
Allow user to use ava without TTS on Linuximport time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS.

        On macOS the MP3 is streamed through an auto-deleted temporary
        file; elsewhere it is saved to disk, played (except on Linux,
        where playback is skipped so ava can run without TTS support),
        and removed afterwards.
        """
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "darwin":
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            # startswith covers both "linux" and "linux2"; replaces the
            # original empty `if linux: pass / else: play` branch.
            if not _platform.startswith("linux"):
                playsound(filename)
            os.remove(filename)
        self.queue_tts.task_done()
| <commit_before>import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS."""
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "linux" or _platform == "linux2" or _platform == "darwin":
            # POSIX: stream the MP3 through a temporary file that is
            # removed automatically when the context manager exits.
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            # Other platforms (e.g. Windows): save to a named file,
            # play it, then delete it manually.
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            playsound(filename)
            os.remove(filename)
        # Tell the queue this work item is finished.
        self.queue_tts.task_done()
<commit_msg>Allow user to use ava without TTS on Linux<commit_after>import time
import os
from tempfile import NamedTemporaryFile
from sys import platform as _platform
from gtts import gTTS
from .playsound import playsound
from ..queues import QueueTtS
from ..components import _BaseComponent
class TextToSpeech(_BaseComponent):
    """Component that pulls sentences from the TtS queue and speaks them."""
    def __init__(self):
        super().__init__()
        # Queue of sentences waiting to be spoken.
        self.queue_tts = QueueTtS()
    def run(self):
        """Fetch one sentence from the queue and play it with gTTS.

        On macOS the MP3 is streamed through an auto-deleted temporary
        file; elsewhere it is saved to disk, played (except on Linux,
        where playback is skipped so ava can run without TTS support),
        and removed afterwards.
        """
        sentence = self.queue_tts.get()
        print('To say out loud : {}'.format(sentence))
        tts = gTTS(text=sentence, lang='en')
        if _platform == "darwin":
            with NamedTemporaryFile() as audio_file:
                tts.write_to_fp(audio_file)
                audio_file.seek(0)
                playsound(audio_file.name)
        else:
            filename = str(time.time()).split('.')[0] + ".mp3"
            tts.save(filename)
            # startswith covers both "linux" and "linux2"; replaces the
            # original empty `if linux: pass / else: play` branch.
            if not _platform.startswith("linux"):
                playsound(filename)
            os.remove(filename)
        self.queue_tts.task_done()
|
f6194866a98dccdb8e1c1a1dfee40b11034461ba | src/nyct-json.py | src/nyct-json.py | #!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
# Parse the MTA GTFS-realtime feed configured in settings.
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
# Collect departure timestamps per stop, for L-train trips only.
stops = {}
for entity in message.entity:
    if entity.trip_update.trip.route_id == "L":
        for stop_time_update in entity.trip_update.stop_time_update:
            stop_id = stop_time_update.stop_id
            if stop_id not in stops:
                stops[stop_id] = []
            stops[stop_id].append(stop_time_update.departure.time)
# Write each stop's sorted departures to <stop_id>.json via a scratch
# file, renaming it into place so readers never see a partial file.
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
for stop_id, departures in stops.items():
    file = open(temp, 'w+')
    file.write(json.dumps(sorted(departures)))
    file.flush()
    os.fsync(file)
    file.close()
    os.rename(temp, os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"))
| #!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
# Parse the MTA GTFS-realtime feed configured in settings.
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
# Collect departure timestamps per stop, for L-train trips only.
stops = {}
for entity in message.entity:
    if entity.trip_update.trip.route_id == "L":
        for stop_time_update in entity.trip_update.stop_time_update:
            stop_id = stop_time_update.stop_id
            if stop_id not in stops:
                stops[stop_id] = []
            stops[stop_id].append(stop_time_update.departure.time)
# Scratch path: data is written here first, then renamed into place.
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
def write(filename, payload):
    """Write *payload* as JSON to *filename* via the shared scratch file.

    The write-then-rename dance makes the final file appear atomically.
    BUG FIX: the second parameter used to be named ``json``, shadowing
    the json module, so ``json.dumps(...)`` raised AttributeError as
    soon as the function was called.
    """
    out = open(temp, 'w+')
    out.write(json.dumps(payload))
    out.flush()
    # fsync before rename so the data is durable when the name appears.
    os.fsync(out)
    out.close()
    os.rename(temp, filename)
# Emit one sorted JSON file of departure times per stop.
for stop_id, departures in stops.items():
    write(os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"), sorted(departures))
| Use a function to write. | Use a function to write.
| Python | isc | natestedman/nyct-json,natestedman/nyct-json | #!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
# Parse the MTA GTFS-realtime feed configured in settings.
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
# Collect departure timestamps per stop, for L-train trips only.
stops = {}
for entity in message.entity:
    if entity.trip_update.trip.route_id == "L":
        for stop_time_update in entity.trip_update.stop_time_update:
            stop_id = stop_time_update.stop_id
            if stop_id not in stops:
                stops[stop_id] = []
            stops[stop_id].append(stop_time_update.departure.time)
# Write each stop's sorted departures to <stop_id>.json via a scratch
# file, renaming it into place so readers never see a partial file.
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
for stop_id, departures in stops.items():
    file = open(temp, 'w+')
    file.write(json.dumps(sorted(departures)))
    file.flush()
    os.fsync(file)
    file.close()
    os.rename(temp, os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"))
Use a function to write. | #!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
stops = {}
for entity in message.entity:
if entity.trip_update.trip.route_id == "L":
for stop_time_update in entity.trip_update.stop_time_update:
stop_id = stop_time_update.stop_id
if stop_id not in stops:
stops[stop_id] = []
stops[stop_id].append(stop_time_update.departure.time)
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
def write(filename, payload):
    """Write *payload* as JSON to *filename* via the shared scratch file.

    The write-then-rename dance makes the final file appear atomically.
    BUG FIX: the second parameter used to be named ``json``, shadowing
    the json module, so ``json.dumps(...)`` raised AttributeError as
    soon as the function was called.
    """
    out = open(temp, 'w+')
    out.write(json.dumps(payload))
    out.flush()
    # fsync before rename so the data is durable when the name appears.
    os.fsync(out)
    out.close()
    os.rename(temp, filename)
for stop_id, departures in stops.items():
write(os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"), sorted(departures))
| <commit_before>#!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
# Parse the MTA GTFS-realtime feed configured in settings.
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
# Collect departure timestamps per stop, for L-train trips only.
stops = {}
for entity in message.entity:
    if entity.trip_update.trip.route_id == "L":
        for stop_time_update in entity.trip_update.stop_time_update:
            stop_id = stop_time_update.stop_id
            if stop_id not in stops:
                stops[stop_id] = []
            stops[stop_id].append(stop_time_update.departure.time)
# Write each stop's sorted departures to <stop_id>.json via a scratch
# file, renaming it into place so readers never see a partial file.
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
for stop_id, departures in stops.items():
    file = open(temp, 'w+')
    file.write(json.dumps(sorted(departures)))
    file.flush()
    os.fsync(file)
    file.close()
    os.rename(temp, os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"))
<commit_msg>Use a function to write.<commit_after> | #!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
stops = {}
for entity in message.entity:
if entity.trip_update.trip.route_id == "L":
for stop_time_update in entity.trip_update.stop_time_update:
stop_id = stop_time_update.stop_id
if stop_id not in stops:
stops[stop_id] = []
stops[stop_id].append(stop_time_update.departure.time)
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
def write(filename, payload):
    """Write *payload* as JSON to *filename* via the shared scratch file.

    The write-then-rename dance makes the final file appear atomically.
    BUG FIX: the second parameter used to be named ``json``, shadowing
    the json module, so ``json.dumps(...)`` raised AttributeError as
    soon as the function was called.
    """
    out = open(temp, 'w+')
    out.write(json.dumps(payload))
    out.flush()
    # fsync before rename so the data is durable when the name appears.
    os.fsync(out)
    out.close()
    os.rename(temp, filename)
for stop_id, departures in stops.items():
write(os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"), sorted(departures))
| #!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
# Parse the MTA GTFS-realtime feed configured in settings.
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
# Collect departure timestamps per stop, for L-train trips only.
stops = {}
for entity in message.entity:
    if entity.trip_update.trip.route_id == "L":
        for stop_time_update in entity.trip_update.stop_time_update:
            stop_id = stop_time_update.stop_id
            if stop_id not in stops:
                stops[stop_id] = []
            stops[stop_id].append(stop_time_update.departure.time)
# Write each stop's sorted departures to <stop_id>.json via a scratch
# file, renaming it into place so readers never see a partial file.
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
for stop_id, departures in stops.items():
    file = open(temp, 'w+')
    file.write(json.dumps(sorted(departures)))
    file.flush()
    os.fsync(file)
    file.close()
    os.rename(temp, os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"))
Use a function to write.#!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
stops = {}
for entity in message.entity:
if entity.trip_update.trip.route_id == "L":
for stop_time_update in entity.trip_update.stop_time_update:
stop_id = stop_time_update.stop_id
if stop_id not in stops:
stops[stop_id] = []
stops[stop_id].append(stop_time_update.departure.time)
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
def write(filename, payload):
    """Write *payload* as JSON to *filename* via the shared scratch file.

    The write-then-rename dance makes the final file appear atomically.
    BUG FIX: the second parameter used to be named ``json``, shadowing
    the json module, so ``json.dumps(...)`` raised AttributeError as
    soon as the function was called.
    """
    out = open(temp, 'w+')
    out.write(json.dumps(payload))
    out.flush()
    # fsync before rename so the data is durable when the name appears.
    os.fsync(out)
    out.close()
    os.rename(temp, filename)
for stop_id, departures in stops.items():
write(os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"), sorted(departures))
| <commit_before>#!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
# Parse the MTA GTFS-realtime feed configured in settings.
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
# Collect departure timestamps per stop, for L-train trips only.
stops = {}
for entity in message.entity:
    if entity.trip_update.trip.route_id == "L":
        for stop_time_update in entity.trip_update.stop_time_update:
            stop_id = stop_time_update.stop_id
            if stop_id not in stops:
                stops[stop_id] = []
            stops[stop_id].append(stop_time_update.departure.time)
# Write each stop's sorted departures to <stop_id>.json via a scratch
# file, renaming it into place so readers never see a partial file.
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
for stop_id, departures in stops.items():
    file = open(temp, 'w+')
    file.write(json.dumps(sorted(departures)))
    file.flush()
    os.fsync(file)
    file.close()
    os.rename(temp, os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"))
<commit_msg>Use a function to write.<commit_after>#!/usr/bin/env python
import json
import settings
import os
import urllib2
from proto import gtfs_realtime_pb2
message = gtfs_realtime_pb2.FeedMessage()
url = urllib2.urlopen('http://datamine.mta.info/mta_esi.php?key={0}&feed_id={1}'.format(settings.MTA_API_KEY, settings.MTA_FEED_ID))
message.ParseFromString(url.read())
url.close()
stops = {}
for entity in message.entity:
if entity.trip_update.trip.route_id == "L":
for stop_time_update in entity.trip_update.stop_time_update:
stop_id = stop_time_update.stop_id
if stop_id not in stops:
stops[stop_id] = []
stops[stop_id].append(stop_time_update.departure.time)
temp = os.path.join(settings.JSON_OUT_DIR, 'temp')
def write(filename, payload):
    """Write *payload* as JSON to *filename* via the shared scratch file.

    The write-then-rename dance makes the final file appear atomically.
    BUG FIX: the second parameter used to be named ``json``, shadowing
    the json module, so ``json.dumps(...)`` raised AttributeError as
    soon as the function was called.
    """
    out = open(temp, 'w+')
    out.write(json.dumps(payload))
    out.flush()
    # fsync before rename so the data is durable when the name appears.
    os.fsync(out)
    out.close()
    os.rename(temp, filename)
for stop_id, departures in stops.items():
write(os.path.join(settings.JSON_OUT_DIR, stop_id + ".json"), sorted(departures))
|
e31192cf4989c1cef481eb92d6a91ae99dd8e5f5 | src/pip/_internal/distributions/__init__.py | src/pip/_internal/distributions/__init__.py | from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements are always treated as source distributions.
    if install_req.editable:
        return SourceDistribution(install_req)
    # A non-editable requirement whose link points at a wheel file.
    if install_req.link and install_req.is_wheel:
        return WheelDistribution(install_req)
    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
| from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Return the distribution abstraction matching *install_req*.

    Editable requirements always build from source; otherwise wheels get
    a WheelDistribution and everything else a SourceDistribution.
    """
    if not install_req.editable and install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
| Simplify conditional for choosing WheelDistribution | Simplify conditional for choosing WheelDistribution
| Python | mit | xavfernandez/pip,pradyunsg/pip,rouge8/pip,rouge8/pip,sbidoul/pip,sbidoul/pip,pypa/pip,pfmoore/pip,pradyunsg/pip,xavfernandez/pip,pfmoore/pip,xavfernandez/pip,pypa/pip,rouge8/pip | from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements are always treated as source distributions.
    if install_req.editable:
        return SourceDistribution(install_req)
    # A non-editable requirement whose link points at a wheel file.
    if install_req.link and install_req.is_wheel:
        return WheelDistribution(install_req)
    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
Simplify conditional for choosing WheelDistribution | from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Return the distribution abstraction matching *install_req*.

    Editable requirements always build from source; otherwise wheels get
    a WheelDistribution and everything else a SourceDistribution.
    """
    if not install_req.editable and install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
| <commit_before>from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements are always treated as source distributions.
    if install_req.editable:
        return SourceDistribution(install_req)
    # A non-editable requirement whose link points at a wheel file.
    if install_req.link and install_req.is_wheel:
        return WheelDistribution(install_req)
    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
<commit_msg>Simplify conditional for choosing WheelDistribution<commit_after> | from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Return the distribution abstraction matching *install_req*.

    Editable requirements always build from source; otherwise wheels get
    a WheelDistribution and everything else a SourceDistribution.
    """
    if not install_req.editable and install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
| from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements are always treated as source distributions.
    if install_req.editable:
        return SourceDistribution(install_req)
    # A non-editable requirement whose link points at a wheel file.
    if install_req.link and install_req.is_wheel:
        return WheelDistribution(install_req)
    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
Simplify conditional for choosing WheelDistributionfrom pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Return the distribution abstraction matching *install_req*.

    Editable requirements always build from source; otherwise wheels get
    a WheelDistribution and everything else a SourceDistribution.
    """
    if not install_req.editable and install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
| <commit_before>from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements are always treated as source distributions.
    if install_req.editable:
        return SourceDistribution(install_req)
    # A non-editable requirement whose link points at a wheel file.
    if install_req.link and install_req.is_wheel:
        return WheelDistribution(install_req)
    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
<commit_msg>Simplify conditional for choosing WheelDistribution<commit_after>from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable installs are built from a source tree, never from a wheel.
    # If it's not an editable, is a wheel, it's a WheelDistribution
    if install_req.editable:
        return SourceDistribution(install_req)
    # Pre-built wheels can be installed directly, with no build step.
    if install_req.is_wheel:
        return WheelDistribution(install_req)
    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
|
26579d307d44f00fe71853fa6c13957018fe5c0f | capnp/__init__.py | capnp/__init__.py | """A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__

# Re-export the public API of the compiled `.capnp` extension module.
from .capnp import *
# Underscore-prefixed helpers are skipped by the star import above, so the
# ones needed internally are pulled in explicitly here.
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd

# After the star import, `capnp` names the extension submodule; delete it so
# it is not exposed as part of the package namespace.
del capnp
| """A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__

# Re-export the public API of the compiled `.capnp` extension module.
from .capnp import *
# The star import skips underscore-prefixed names; import the private
# helpers the package relies on explicitly.
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd

# Drop the submodule name (shadowed by the star import) from the package
# namespace.
del capnp

# `add_import_hook` comes from `.capnp`; per the inline note it switches the
# schema import hook on for every consumer of the package.
add_import_hook() # enable import hook by default
| Enable import hook by default | Enable import hook by default
| Python | bsd-2-clause | rcrowder/pycapnp,SymbiFlow/pycapnp,jparyani/pycapnp,SymbiFlow/pycapnp,SymbiFlow/pycapnp,tempbottle/pycapnp,tempbottle/pycapnp,jparyani/pycapnp,rcrowder/pycapnp,SymbiFlow/pycapnp,jparyani/pycapnp,jparyani/pycapnp,tempbottle/pycapnp,rcrowder/pycapnp,rcrowder/pycapnp,tempbottle/pycapnp | """A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
Enable import hook by default | """A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
add_import_hook() # enable import hook by default
| <commit_before>"""A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
<commit_msg>Enable import hook by default<commit_after> | """A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
add_import_hook() # enable import hook by default
| """A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
Enable import hook by default"""A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
add_import_hook() # enable import hook by default
| <commit_before>"""A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
<commit_msg>Enable import hook by default<commit_after>"""A python library wrapping the Cap'n Proto C++ library
Example Usage::
import capnp
addressbook = capnp.load('addressbook.capnp')
# Building
addresses = addressbook.AddressBook.newMessage()
people = addresses.init('people', 1)
alice = people[0]
alice.id = 123
alice.name = 'Alice'
alice.email = 'alice@example.com'
alicePhone = alice.init('phones', 1)[0]
alicePhone.type = 'mobile'
f = open('example.bin', 'w')
addresses.writeTo(f)
f.close()
# Reading
f = open('example.bin')
addresses = addressbook.AddressBook.readFrom(f)
for person in addresses.people:
print(person.name, ':', person.email)
for phone in person.phones:
print(phone.type, ':', phone.number)
"""
from .version import version as __version__
from .capnp import *
from .capnp import _DynamicStructReader, _DynamicStructBuilder, _DynamicResizableListBuilder, _DynamicListReader, _DynamicListBuilder, _DynamicOrphan, _DynamicResizableListBuilder, _MallocMessageBuilder, _PackedFdMessageReader, _StreamFdMessageReader, _write_message_to_fd, _write_packed_message_to_fd
del capnp
add_import_hook() # enable import hook by default
|
003c7f9b7c6d35e176a9a4d5c56b61b0b4f96281 | webapp/tests/__init__.py | webapp/tests/__init__.py | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
    """Base class for tests that need a wired-up app and database.

    Builds the application, recreates the schema and inserts a minimal
    brand/party fixture before every test.
    """

    def setUp(self, env='test'):
        """Create the app for *env* (default ``'test'``) and reset the DB.

        The environment is parameterized so subclasses can exercise a
        custom configuration while keeping the default behavior intact.
        """
        self.app = create_app(env, initialize=False)
        self.db = db
        db.app = self.app
        # Recreate the schema so every test starts from empty tables.
        db.drop_all()
        db.create_all()
        self.create_brand_and_party()
        self.client = self.app.test_client()

    def create_brand_and_party(self):
        """Insert the minimal brand/party fixture most tests rely on."""
        self.brand = Brand(id='acme', title='ACME')
        db.session.add(self.brand)
        self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
        db.session.add(self.party)
        db.session.commit()

    def tearDown(self):
        # Dispose of the session and schema so state cannot leak between
        # tests.
        db.session.remove()
        db.drop_all()
| # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
    """Base test case that wires up the application and a fresh database."""
    def setUp(self, env='test'):
        # Build the app for the requested config environment.
        # NOTE(review): `initialize=False` presumably skips startup side
        # effects -- confirm against create_app's definition.
        self.app = create_app(env, initialize=False)
        self.db = db
        db.app = self.app
        # Recreate the schema so every test starts from empty tables.
        db.drop_all()
        db.create_all()
        self.create_brand_and_party()
        self.client = self.app.test_client()
    def create_brand_and_party(self):
        # Minimal fixture: one brand with one party, committed together.
        self.brand = Brand(id='acme', title='ACME')
        db.session.add(self.brand)
        self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
        db.session.add(self.party)
        db.session.commit()
    def tearDown(self):
        # Drop the session and schema so state cannot leak between tests.
        db.session.remove()
        db.drop_all()
| Allow use of a custom environment for tests. | Allow use of a custom environment for tests.
| Python | bsd-3-clause | m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
Allow use of a custom environment for tests. | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self, env='test'):
self.app = create_app(env, initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| <commit_before># -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
<commit_msg>Allow use of a custom environment for tests.<commit_after> | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self, env='test'):
self.app = create_app(env, initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
Allow use of a custom environment for tests.# -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self, env='test'):
self.app = create_app(env, initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| <commit_before># -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
<commit_msg>Allow use of a custom environment for tests.<commit_after># -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self, env='test'):
self.app = create_app(env, initialize=False)
self.db = db
db.app = self.app
db.drop_all()
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
self.brand = Brand(id='acme', title='ACME')
db.session.add(self.brand)
self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014')
db.session.add(self.party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
|
685365af5126c6e83db468eef24b008fc1526462 | tools/game_utils.py | tools/game_utils.py | import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
    """Return the exact number of distinct hole-card deals for *game*.

    Counts ordered deals: choose ``num_players * num_hole_cards`` cards
    out of the deck and distribute them over the seats, i.e.
    ``C(num_cards, k) * k!`` with ``k = num_total_hole_cards``.
    """
    num_players = game.get_num_players()
    num_hole_cards = game.get_num_hole_cards()
    num_cards = game.get_num_suits() * game.get_num_ranks()
    num_total_hole_cards = num_players * num_hole_cards
    # scipy.misc.comb was deprecated in SciPy 1.0 and removed in 1.3; the
    # exact integer versions live in scipy.special (already imported here).
    return scipy.special.comb(num_cards, num_total_hole_cards, exact=True) \
        * scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
| import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
    """Count the exact number of ordered hole-card deals in *game*.

    Equals ``C(num_cards, k) * k!`` where ``k`` is the total number of
    hole cards dealt across all players.
    """
    num_players = game.get_num_players()
    num_hole_cards = game.get_num_hole_cards()
    num_cards = game.get_num_suits() * game.get_num_ranks()
    num_total_hole_cards = num_players * num_hole_cards
    # Use scipy.special: scipy.misc.comb/perm were deprecated in SciPy 1.0
    # and removed in 1.3, so the old spelling crashes on modern SciPy.
    return scipy.special.comb(num_cards, num_total_hole_cards, exact=True) \
        * scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
    """Return True iff every ActionNode in *strategy_tree* holds a valid
    probability distribution, i.e. its strategy entries sum to 1.

    Nodes other than ActionNode are ignored.
    """
    correct = True

    def on_node(node):
        if isinstance(node, ActionNode):
            nonlocal correct
            # Strategies are floating point, so an exact `!= 1` comparison
            # rejects distributions that are off only by rounding error;
            # compare within numpy's default tolerance instead.
            if not np.isclose(np.sum(node.strategy), 1):
                correct = False

    walk_tree(strategy_tree, on_node)
    return correct
| Add method to verify that all strategy probabilities add to 1 | Add method to verify that all strategy probabilities add to 1
| Python | mit | JakubPetriska/poker-cfr,JakubPetriska/poker-cfr | import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
Add method to verify that all strategy probabilities add to 1 | import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
correct = True
def on_node(node):
if isinstance(node, ActionNode):
nonlocal correct
strategy_sum = np.sum(node.strategy)
if strategy_sum != 1:
correct = False
walk_tree(strategy_tree, on_node)
return correct
| <commit_before>import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
<commit_msg>Add method to verify that all strategy probabilities add to 1<commit_after> | import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
correct = True
def on_node(node):
if isinstance(node, ActionNode):
nonlocal correct
strategy_sum = np.sum(node.strategy)
if strategy_sum != 1:
correct = False
walk_tree(strategy_tree, on_node)
return correct
| import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
Add method to verify that all strategy probabilities add to 1import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
correct = True
def on_node(node):
if isinstance(node, ActionNode):
nonlocal correct
strategy_sum = np.sum(node.strategy)
if strategy_sum != 1:
correct = False
walk_tree(strategy_tree, on_node)
return correct
| <commit_before>import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
<commit_msg>Add method to verify that all strategy probabilities add to 1<commit_after>import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
correct = True
def on_node(node):
if isinstance(node, ActionNode):
nonlocal correct
strategy_sum = np.sum(node.strategy)
if strategy_sum != 1:
correct = False
walk_tree(strategy_tree, on_node)
return correct
|
4368567e44c144e85fa9fcdb72f2648c13eb8158 | rest_framework_jsonp/renderers.py | rest_framework_jsonp/renderers.py | """
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to json,
    wrapping the json output in a callback function.
    """
    media_type = 'application/javascript'
    format = 'jsonp'
    callback_parameter = 'callback'
    default_callback = 'callback'
    charset = 'utf-8'

    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get('request', None)
        # `request.QUERY_PARAMS` was deprecated in Django REST framework 3.0
        # and removed in 3.2; `request.query_params` is the supported
        # accessor.
        params = request and request.query_params or {}
        return params.get(self.callback_parameter, self.default_callback)

    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.

        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        json = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                 renderer_context)
        # The response body must be bytes; encode the callback name with the
        # same charset the JSON renderer used.
        return callback.encode(self.charset) + b'(' + json + b');'
| """
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    JSON renderer variant that wraps its output in a JSONP callback
    invocation, e.g. ``callbackName({...});``.
    """
    media_type = 'application/javascript'
    format = 'jsonp'
    callback_parameter = 'callback'
    default_callback = 'callback'
    charset = 'utf-8'

    def get_callback(self, renderer_context):
        """
        Resolve the callback name, preferring the request's query string
        and falling back to the class default.
        """
        request = renderer_context.get('request', None)
        if request:
            return request.query_params.get(self.callback_parameter,
                                            self.default_callback)
        return self.default_callback

    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Serialize ``data`` to JSON and wrap it in the callback function.

        Clients pick the function name via the URL query string, for
        example ``?callback=exampleCallbackName``.
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        payload = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                    renderer_context)
        # Assemble the final bytes payload: callback(<json>);
        return b''.join([callback.encode(self.charset), b'(', payload, b');'])
| Update for compat w/ djangorestframework 3.2 | Update for compat w/ djangorestframework 3.2
Change request.QUERY_PARAMS to request.query_params | Python | isc | baxrob/django-rest-framework-jsonp | """
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
params = request and request.QUERY_PARAMS or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
Update for compat w/ djangorestframework 3.2
Change request.QUERY_PARAMS to request.query_params | """
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
"""
Renderer which serializes to json,
wrapping the json output in a callback function.
"""
media_type = 'application/javascript'
format = 'jsonp'
callback_parameter = 'callback'
default_callback = 'callback'
charset = 'utf-8'
def get_callback(self, renderer_context):
"""
Determine the name of the callback to wrap around the json output.
"""
request = renderer_context.get('request', None)
params = request and request.query_params or {}
return params.get(self.callback_parameter, self.default_callback)
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders into jsonp, wrapping the json output in a callback function.
Clients may set the callback function name using a query parameter
on the URL, for example: ?callback=exampleCallbackName
"""
renderer_context = renderer_context or {}
callback = self.get_callback(renderer_context)
json = super(JSONPRenderer, self).render(data, accepted_media_type,
renderer_context)
return callback.encode(self.charset) + b'(' + json + b');'
| <commit_before>"""
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to json,
    wrapping the json output in a callback function.
    """
    media_type = 'application/javascript'
    format = 'jsonp'
    # Query-string key the client may use to choose the callback name.
    callback_parameter = 'callback'
    # Callback name used when the request does not supply one.
    default_callback = 'callback'
    charset = 'utf-8'
    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get('request', None)
        # `a and b or c` yields {} when there is no request in the context.
        # NOTE(review): request.QUERY_PARAMS is the old DRF spelling --
        # presumably request.query_params on DRF 3.2+; verify before reuse.
        params = request and request.QUERY_PARAMS or {}
        return params.get(self.callback_parameter, self.default_callback)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.
        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        # Parent renderer produces the JSON document as bytes.
        json = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                 renderer_context)
        # Wrap as callback(<json>); -- all operands must be bytes.
        return callback.encode(self.charset) + b'(' + json + b');'
<commit_msg>Update for compat w/ djangorestframework 3.2
Change request.QUERY_PARAMS to request.query_params<commit_after> | """
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """Serialize to JSON and wrap the payload in a JSONP callback call."""
    media_type = 'application/javascript'
    format = 'jsonp'
    callback_parameter = 'callback'
    default_callback = 'callback'
    charset = 'utf-8'
    def get_callback(self, renderer_context):
        """Pick the callback name to wrap around the JSON output."""
        request = renderer_context.get('request', None)
        if request:
            params = request.query_params
        else:
            params = {}
        return params.get(self.callback_parameter, self.default_callback)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """Render *data* as ``callback(<json>);`` JavaScript bytes.

        Clients may choose the callback name via a query parameter,
        e.g. ``?callback=exampleCallbackName``.
        """
        context = renderer_context or {}
        callback = self.get_callback(context)
        payload = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                    context)
        return b''.join([callback.encode(self.charset), b'(', payload, b');'])
| """
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to json,
    wrapping the json output in a callback function.
    """
    # Served as JavaScript, not plain JSON, because it is a script payload.
    media_type = 'application/javascript'
    format = 'jsonp'
    callback_parameter = 'callback'
    default_callback = 'callback'
    charset = 'utf-8'
    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get('request', None)
        # NOTE(review): request.QUERY_PARAMS is the old DRF spelling --
        # presumably request.query_params on DRF 3.2+; verify before reuse.
        params = request and request.QUERY_PARAMS or {}
        return params.get(self.callback_parameter, self.default_callback)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.
        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        # Delegate JSON serialization to the parent renderer (returns bytes).
        json = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                 renderer_context)
        return callback.encode(self.charset) + b'(' + json + b');'
Update for compat w/ djangorestframework 3.2
Change request.QUERY_PARAMS to request.query_params"""
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to json,
    wrapping the json output in a callback function.
    """
    media_type = 'application/javascript'
    format = 'jsonp'
    # Query-string key the client may use to choose the callback name.
    callback_parameter = 'callback'
    default_callback = 'callback'
    charset = 'utf-8'
    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get('request', None)
        # request.query_params: DRF 3.2+ API (see commit message in this record).
        # `a and b or c` yields {} when there is no request in the context.
        params = request and request.query_params or {}
        return params.get(self.callback_parameter, self.default_callback)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.
        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        # Delegate JSON serialization to the parent renderer (returns bytes).
        json = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                 renderer_context)
        return callback.encode(self.charset) + b'(' + json + b');'
| <commit_before>"""
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to json,
    wrapping the json output in a callback function.
    """
    media_type = 'application/javascript'
    format = 'jsonp'
    callback_parameter = 'callback'
    default_callback = 'callback'
    charset = 'utf-8'
    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get('request', None)
        # NOTE(review): request.QUERY_PARAMS is the old DRF spelling --
        # presumably request.query_params on DRF 3.2+; verify before reuse.
        params = request and request.QUERY_PARAMS or {}
        return params.get(self.callback_parameter, self.default_callback)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.
        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        # Parent renderer produces the JSON document as bytes.
        json = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                 renderer_context)
        # Wrap as callback(<json>); -- all operands must be bytes.
        return callback.encode(self.charset) + b'(' + json + b');'
<commit_msg>Update for compat w/ djangorestframework 3.2
Change request.QUERY_PARAMS to request.query_params<commit_after>"""
Provides JSONP rendering support.
"""
from __future__ import unicode_literals
from rest_framework.renderers import JSONRenderer
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to json,
    wrapping the json output in a callback function.
    """
    media_type = 'application/javascript'
    format = 'jsonp'
    callback_parameter = 'callback'
    # Fallback callback name when the request does not specify one.
    default_callback = 'callback'
    charset = 'utf-8'
    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get('request', None)
        # request.query_params: DRF 3.2+ API (see commit message in this record).
        params = request and request.query_params or {}
        return params.get(self.callback_parameter, self.default_callback)
    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.
        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        renderer_context = renderer_context or {}
        callback = self.get_callback(renderer_context)
        # Parent renderer produces the JSON document as bytes.
        json = super(JSONPRenderer, self).render(data, accepted_media_type,
                                                 renderer_context)
        return callback.encode(self.charset) + b'(' + json + b');'
|
d095277e1d54da6b9fcef7c5a79e86dc3f759e81 | word-count/word_count.py | word-count/word_count.py | def word_count(s):
words = strip_punc(s.lower()).split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
| def word_count(s):
words = strip_punc(s).lower().split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
| Move .lower() method call for readability | Move .lower() method call for readability
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | def word_count(s):
words = strip_punc(s.lower()).split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
Move .lower() method call for readability | def word_count(s):
words = strip_punc(s).lower().split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
| <commit_before>def word_count(s):
words = strip_punc(s.lower()).split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
<commit_msg>Move .lower() method call for readability<commit_after> | def word_count(s):
words = strip_punc(s).lower().split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
| def word_count(s):
words = strip_punc(s.lower()).split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
Move .lower() method call for readabilitydef word_count(s):
words = strip_punc(s).lower().split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
| <commit_before>def word_count(s):
words = strip_punc(s.lower()).split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
<commit_msg>Move .lower() method call for readability<commit_after>def word_count(s):
words = strip_punc(s).lower().split()
return {word: words.count(word) for word in set(words)}
def strip_punc(s):
return "".join(ch if ch.isalnum() else " " for ch in s)
|
e9c32014093af49edc6a12b6db37b44a04d12892 | test/integration/ggrc_workflows/__init__.py | test/integration/ggrc_workflows/__init__.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
| # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from integration.ggrc import TestCase
from integration.ggrc_workflows.generator import WorkflowsGenerator
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class WorkflowTestCase(TestCase):
def setUp(self):
super(WorkflowTestCase, self).setUp()
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
self.random_objects = self.object_generator.generate_random_objects()
| Add WorkflowTestCase to integration tests | Add WorkflowTestCase to integration tests
| Python | apache-2.0 | josthkko/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
Add WorkflowTestCase to integration tests | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from integration.ggrc import TestCase
from integration.ggrc_workflows.generator import WorkflowsGenerator
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class WorkflowTestCase(TestCase):
def setUp(self):
super(WorkflowTestCase, self).setUp()
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
self.random_objects = self.object_generator.generate_random_objects()
| <commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
<commit_msg>Add WorkflowTestCase to integration tests<commit_after> | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from integration.ggrc import TestCase
from integration.ggrc_workflows.generator import WorkflowsGenerator
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class WorkflowTestCase(TestCase):
def setUp(self):
super(WorkflowTestCase, self).setUp()
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
self.random_objects = self.object_generator.generate_random_objects()
| # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
Add WorkflowTestCase to integration tests# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from integration.ggrc import TestCase
from integration.ggrc_workflows.generator import WorkflowsGenerator
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class WorkflowTestCase(TestCase):
def setUp(self):
super(WorkflowTestCase, self).setUp()
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
self.random_objects = self.object_generator.generate_random_objects()
| <commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
<commit_msg>Add WorkflowTestCase to integration tests<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from integration.ggrc import TestCase
from integration.ggrc_workflows.generator import WorkflowsGenerator
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class WorkflowTestCase(TestCase):
def setUp(self):
super(WorkflowTestCase, self).setUp()
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
self.random_objects = self.object_generator.generate_random_objects()
|
edae3aa862d6266ee1f2f96a2427e88af994f590 | Nested_list.py | Nested_list.py | '''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
| '''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
# Read N, then one name and one score per student.
for _ in range(int(input())):
    name = input()
    score = float(input())
    students.append([name,score])
# Order by grade so the lowest grade sits first.
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
# Advance past the lowest grade to the first strictly higher one.
for name,grade in students:
    if grade > second_lowest_grade:
        second_lowest_grade = grade
        break
names = []
# sorted(students) orders by name (first list element), giving the
# required alphabetical order for grade ties.
for name, grade in sorted(students):
    if grade == second_lowest_grade:
        names.append(name)
print('\n'.join(names))
| Fix indentation for break keyword. | Fix indentation for break keyword.
| Python | unlicense | jasielserra/PythonDevs | '''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
Fix indentation for break keyword. | '''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
# Read N, then one name and one score per student.
for _ in range(int(input())):
    name = input()
    score = float(input())
    students.append([name,score])
# Order by grade so the lowest grade sits first.
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
# Advance past the lowest grade to the first strictly higher one.
for name,grade in students:
    if grade > second_lowest_grade:
        second_lowest_grade = grade
        break
names = []
# sorted(students) orders by name, giving alphabetical order for ties.
for name, grade in sorted(students):
    if grade == second_lowest_grade:
        names.append(name)
print('\n'.join(names))
| <commit_before>'''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
<commit_msg>Fix indentation for break keyword.<commit_after> | '''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
| '''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
Fix indentation for break keyword.'''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
| <commit_before>'''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name,score])
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
for name,grade in students:
if grade > second_lowest_grade:
second_lowest_grade = grade
break
names = []
for name, grade in sorted(students):
if grade == second_lowest_grade:
names.append(name)
print('\n'.join(names))
<commit_msg>Fix indentation for break keyword.<commit_after>'''
Given the names and grades for each student in a Physics class of N students,
store them in a nested list and print the name(s) of any student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their names alphabetically and print
each name on a new line.
By HackerRank
'''
students = []
# Read N, then one name and one score per student.
for _ in range(int(input())):
    name = input()
    score = float(input())
    students.append([name,score])
# Order by grade so the lowest grade sits first.
students.sort(key=lambda x: x[1])
second_lowest_grade = students[0][1]
# Advance past the lowest grade to the first strictly higher one.
for name,grade in students:
    if grade > second_lowest_grade:
        second_lowest_grade = grade
        break
names = []
# sorted(students) orders by name, giving alphabetical order for ties.
for name, grade in sorted(students):
    if grade == second_lowest_grade:
        names.append(name)
print('\n'.join(names))
|
7d43df8d861b73ffb7c5e11531f3848a248bcc6a | trial_search_app.py | trial_search_app.py | from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return display_trial(form.nct_id.data)
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
| from flask import Flask, render_template, redirect, url_for
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial/<string:nct_id>')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return redirect(url_for('display_trial', nct_id=form.nct_id.data))
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
| Correct redirect after search from submission | Correct redirect after search from submission
| Python | apache-2.0 | hniedner/ctrp_rest_client,hniedner/ctrp_rest_client,hniedner/ctrp_rest_client | from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return display_trial(form.nct_id.data)
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
Correct redirect after search from submission | from flask import Flask, render_template, redirect, url_for
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial/<string:nct_id>')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return redirect(url_for('display_trial', nct_id=form.nct_id.data))
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
| <commit_before>from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return display_trial(form.nct_id.data)
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
<commit_msg>Correct redirect after search from submission<commit_after> | from flask import Flask, render_template, redirect, url_for
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial/<string:nct_id>')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return redirect(url_for('display_trial', nct_id=form.nct_id.data))
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
| from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return display_trial(form.nct_id.data)
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
Correct redirect after search from submissionfrom flask import Flask, render_template, redirect, url_for
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial/<string:nct_id>')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return redirect(url_for('display_trial', nct_id=form.nct_id.data))
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
| <commit_before>from flask import Flask, render_template
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return display_trial(form.nct_id.data)
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
<commit_msg>Correct redirect after search from submission<commit_after>from flask import Flask, render_template, redirect, url_for
from flask_bootstrap import Bootstrap
from flask_nav import Nav
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms.validators import DataRequired
class SimpleSearchForm(FlaskForm):
nct_id = StringField('nct_id', validators=[DataRequired()])
import api_client
# Initialize the Flask application
app = Flask(__name__)
app.secret_key = 'ctrp'
app.debug = True
# Install our Bootstrap extension
Bootstrap(app)
# We initialize the navigation as well
nav = Nav()
nav.init_app(app)
@app.route('/')
def home():
return search_form()
@app.route('/display_trial/<string:nct_id>')
def display_trial(nct_id):
# retrieving trial as dictionary from the CTRP API client
trial_dict = api_client.get_trial_by_nct_id(nct_id)
# Render template
return render_template('display_trial.html', trial=trial_dict)
@app.route('/search_form', methods=('GET', 'POST'))
def search_form():
form = SimpleSearchForm()
if form.validate_on_submit():
return redirect(url_for('display_trial', nct_id=form.nct_id.data))
# Render template
return render_template('search_form.html', form=form)
# Run Flask webapp
if __name__ == '__main__':
app.run()
|
606cb3475e2e4220822f924d13881dfaefb51aa4 | teryt_tree/rest_framework_ext/viewsets.py | teryt_tree/rest_framework_ext/viewsets.py | import django_filters
from django.shortcuts import get_object_or_404
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(action=custom_area_filter)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
| import django_filters
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import \
JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(
method=custom_area_filter,
label=_("Area")
)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
| Update JednostkaAdministracyjnaFilter for new django-filters | Update JednostkaAdministracyjnaFilter for new django-filters
| Python | bsd-3-clause | ad-m/django-teryt-tree | import django_filters
from django.shortcuts import get_object_or_404
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(action=custom_area_filter)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
Update JednostkaAdministracyjnaFilter for new django-filters | import django_filters
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import \
JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(
method=custom_area_filter,
label=_("Area")
)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
| <commit_before>import django_filters
from django.shortcuts import get_object_or_404
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(action=custom_area_filter)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
<commit_msg>Update JednostkaAdministracyjnaFilter for new django-filters<commit_after> | import django_filters
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import \
JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(
method=custom_area_filter,
label=_("Area")
)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
| import django_filters
from django.shortcuts import get_object_or_404
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(action=custom_area_filter)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
Update JednostkaAdministracyjnaFilter for new django-filtersimport django_filters
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import \
JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(
method=custom_area_filter,
label=_("Area")
)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
| <commit_before>import django_filters
from django.shortcuts import get_object_or_404
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(action=custom_area_filter)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
<commit_msg>Update JednostkaAdministracyjnaFilter for new django-filters<commit_after>import django_filters
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
try:
from django_filters import rest_framework as filters
except ImportError: # Back-ward compatible for django-rest-framework<3.7
from rest_framework import filters
from rest_framework import viewsets
from teryt_tree.models import JednostkaAdministracyjna
from teryt_tree.rest_framework_ext.serializers import \
JednostkaAdministracyjnaSerializer
def custom_area_filter(queryset, _, value):
if not value:
return queryset
return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value))
class JednostkaAdministracyjnaFilter(filters.FilterSet):
area = django_filters.CharFilter(
method=custom_area_filter,
label=_("Area")
)
class Meta:
model = JednostkaAdministracyjna
fields = ['name', 'category', 'category__level', 'area']
class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet):
queryset = (JednostkaAdministracyjna.objects.
select_related('category').
prefetch_related('children').
all())
serializer_class = JednostkaAdministracyjnaSerializer
filter_backends = (filters.DjangoFilterBackend,)
filter_class = JednostkaAdministracyjnaFilter
|
1090acb35ea4ce5c8d17db716539d3354feabc12 | nodeconductor/iaas/migrations/0038_securitygroup_state.py | nodeconductor/iaas/migrations/0038_securitygroup_state.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
def mark_security_groups_as_synced(apps, schema_editor):
SecurityGroup = apps.get_model('iaas', 'SecurityGroup')
SecurityGroup.objects.all().update(state=3)
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
migrations.RunPython(mark_security_groups_as_synced),
]
| Mark all exist security groups as synced | Mark all exist security groups as synced
- itacloud-4843
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
]
Mark all exist security groups as synced
- itacloud-4843 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
def mark_security_groups_as_synced(apps, schema_editor):
SecurityGroup = apps.get_model('iaas', 'SecurityGroup')
SecurityGroup.objects.all().update(state=3)
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
migrations.RunPython(mark_security_groups_as_synced),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
]
<commit_msg>Mark all exist security groups as synced
- itacloud-4843<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
def mark_security_groups_as_synced(apps, schema_editor):
SecurityGroup = apps.get_model('iaas', 'SecurityGroup')
SecurityGroup.objects.all().update(state=3)
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
migrations.RunPython(mark_security_groups_as_synced),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
]
Mark all exist security groups as synced
- itacloud-4843# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
def mark_security_groups_as_synced(apps, schema_editor):
SecurityGroup = apps.get_model('iaas', 'SecurityGroup')
SecurityGroup.objects.all().update(state=3)
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
migrations.RunPython(mark_security_groups_as_synced),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
]
<commit_msg>Mark all exist security groups as synced
- itacloud-4843<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django_fsm
def mark_security_groups_as_synced(apps, schema_editor):
SecurityGroup = apps.get_model('iaas', 'SecurityGroup')
SecurityGroup.objects.all().update(state=3)
class Migration(migrations.Migration):
dependencies = [
('iaas', '0037_init_security_groups_quotas'),
]
operations = [
migrations.AddField(
model_name='securitygroup',
name='state',
field=django_fsm.FSMIntegerField(default=1, choices=[(1, 'Sync Scheduled'), (2, 'Syncing'), (3, 'In Sync'), (4, 'Erred')]),
preserve_default=True,
),
migrations.RunPython(mark_security_groups_as_synced),
]
|
3e6e485443a901660a461dbbc8b324bfe4c19c8f | tests/v5/conftest.py | tests/v5/conftest.py | import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_GENERATORS | import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS
@pytest.fixture
def exemplar_primitive_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS | Add fixture for exemplar primitive generators | Add fixture for exemplar primitive generators
| Python | mit | maxalbert/tohu | import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_GENERATORSAdd fixture for exemplar primitive generators | import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS
@pytest.fixture
def exemplar_primitive_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS | <commit_before>import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_GENERATORS<commit_msg>Add fixture for exemplar primitive generators<commit_after> | import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS
@pytest.fixture
def exemplar_primitive_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS | import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_GENERATORSAdd fixture for exemplar primitive generatorsimport pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS
@pytest.fixture
def exemplar_primitive_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS | <commit_before>import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_GENERATORS<commit_msg>Add fixture for exemplar primitive generators<commit_after>import pytest
from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Boolean(p=0.3),
]
@pytest.fixture
def exemplar_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS
@pytest.fixture
def exemplar_primitive_generators():
"""
Return a list of generators which contains an example
for each type of generator supported by tohu.
"""
return EXEMPLAR_PRIMITIVE_GENERATORS |
5d44a67c1e416f8024bac1cbef5f3b3516ffd42a | cms_shiny/urls.py | cms_shiny/urls.py | from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
| from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)/$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
| Append trailing slash to detail URL | Append trailing slash to detail URL
| Python | bsd-3-clause | mfcovington/djangocms-shiny-app,mfcovington/djangocms-shiny-app,mfcovington/djangocms-shiny-app | from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
Append trailing slash to detail URL | from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)/$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
| <commit_before>from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
<commit_msg>Append trailing slash to detail URL<commit_after> | from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)/$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
| from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
Append trailing slash to detail URLfrom django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)/$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
| <commit_before>from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
<commit_msg>Append trailing slash to detail URL<commit_after>from django.conf.urls import patterns, url
from cms_shiny.views import ShinyAppListView, ShinyAppDetailView
urlpatterns = patterns('',
url(r'^$', ShinyAppListView.as_view(), name='shiny_list'),
url(r'^(?P<slug>[^/]+)/$', ShinyAppDetailView.as_view(), name='shiny_detail'),
)
|
0d302a13475b4e8ae073cad8c667019419a6a7e8 | vdb/zika_download.py | vdb/zika_download.py | import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus',
'authors', 'latitude', 'longitude']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__) | import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus', 'authors']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)
| Remove lat/long from zika download | Remove lat/long from zika download
| Python | agpl-3.0 | nextstrain/fauna,blab/nextstrain-db,blab/nextstrain-db,nextstrain/fauna | import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus',
'authors', 'latitude', 'longitude']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)Remove lat/long from zika download | import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus', 'authors']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)
| <commit_before>import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus',
'authors', 'latitude', 'longitude']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)<commit_msg>Remove lat/long from zika download<commit_after> | import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus', 'authors']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)
| import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus',
'authors', 'latitude', 'longitude']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)Remove lat/long from zika downloadimport os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus', 'authors']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)
| <commit_before>import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus',
'authors', 'latitude', 'longitude']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)<commit_msg>Remove lat/long from zika download<commit_after>import os,datetime
from download import download
from download import get_parser
class zika_download(download):
def __init__(self, **kwargs):
download.__init__(self, **kwargs)
if __name__=="__main__":
parser = get_parser()
args = parser.parse_args()
fasta_fields = ['strain', 'virus', 'accession', 'collection_date', 'region',
'country', 'division', 'location', 'source', 'locus', 'authors']
args.fasta_fields = fasta_fields
current_date = str(datetime.datetime.strftime(datetime.datetime.now(),'%Y_%m_%d'))
if args.fstem is None:
args.fstem = args.virus + '_' + current_date
if not os.path.isdir(args.path):
os.makedirs(args.path)
connfluVDB = zika_download(**args.__dict__)
connfluVDB.download(**args.__dict__)
|
f946ca92b74bb945c3884fcfa3e515132ec56b06 | virtool/processes.py | virtool/processes.py | STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
| STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
class ProgressTracker:
def __init__(self, total, db=None, increment=0.05, factor=1):
self.total = total
self.db = db
self.increment = increment
self.factor = factor
self.count = 0
self.last_reported = 0
def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
return self.progress
async def reported(self):
self.last_reported = self.progress
@property
def progress(self):
return round(self.count / self.total, 2)
| Add ProgressTracker class for tracking progress in long operations | Add ProgressTracker class for tracking progress in long operations
| Python | mit | virtool/virtool,igboyes/virtool,igboyes/virtool,virtool/virtool | STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
Add ProgressTracker class for tracking progress in long operations | STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
class ProgressTracker:
def __init__(self, total, db=None, increment=0.05, factor=1):
self.total = total
self.db = db
self.increment = increment
self.factor = factor
self.count = 0
self.last_reported = 0
def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
return self.progress
async def reported(self):
self.last_reported = self.progress
@property
def progress(self):
return round(self.count / self.total, 2)
| <commit_before>STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
<commit_msg>Add ProgressTracker class for tracking progress in long operations<commit_after> | STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
class ProgressTracker:
def __init__(self, total, db=None, increment=0.05, factor=1):
self.total = total
self.db = db
self.increment = increment
self.factor = factor
self.count = 0
self.last_reported = 0
def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
return self.progress
async def reported(self):
self.last_reported = self.progress
@property
def progress(self):
return round(self.count / self.total, 2)
| STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
Add ProgressTracker class for tracking progress in long operationsSTEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
class ProgressTracker:
def __init__(self, total, db=None, increment=0.05, factor=1):
self.total = total
self.db = db
self.increment = increment
self.factor = factor
self.count = 0
self.last_reported = 0
def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
return self.progress
async def reported(self):
self.last_reported = self.progress
@property
def progress(self):
return round(self.count / self.total, 2)
| <commit_before>STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
<commit_msg>Add ProgressTracker class for tracking progress in long operations<commit_after>STEP_COUNTS = {
"import_reference": 0,
"setup_remote_reference": 0,
"update_remote_reference": 0,
"update_software": 0,
"install_hmms": 0
}
FIRST_STEPS = {
"import_reference": "load_file",
"setup_remote_reference": "",
"update_remote_reference": "",
"update_software": "",
"install_hmms": ""
}
UNIQUES = [
"update_software",
"install_hmms"
]
class ProgressTracker:
def __init__(self, total, db=None, increment=0.05, factor=1):
self.total = total
self.db = db
self.increment = increment
self.factor = factor
self.count = 0
self.last_reported = 0
def add(self, value):
count = self.count + value
if count > self.total:
raise ValueError("Count cannot exceed total")
self.count = count
return self.progress
async def reported(self):
self.last_reported = self.progress
@property
def progress(self):
return round(self.count / self.total, 2)
|
63c0cd90ff9e9a721b175cdd4af8dc52ed6412ad | flatkeys/__init__.py | flatkeys/__init__.py | # -*- coding: UTF-8 -*-
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if type(v) is dict:
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
| # -*- coding: UTF-8 -*-
import collections
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if isinstance(v, collections.Mapping):
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
| Use isinstance check so library can be used for more types | Use isinstance check so library can be used for more types | Python | mit | bfontaine/flatkeys | # -*- coding: UTF-8 -*-
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if type(v) is dict:
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
Use isinstance check so library can be used for more types | # -*- coding: UTF-8 -*-
import collections
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if isinstance(v, collections.Mapping):
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
| <commit_before># -*- coding: UTF-8 -*-
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if type(v) is dict:
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
<commit_msg>Use isinstance check so library can be used for more types<commit_after> | # -*- coding: UTF-8 -*-
import collections
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if isinstance(v, collections.Mapping):
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
| # -*- coding: UTF-8 -*-
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if type(v) is dict:
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
Use isinstance check so library can be used for more types# -*- coding: UTF-8 -*-
import collections
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if isinstance(v, collections.Mapping):
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
| <commit_before># -*- coding: UTF-8 -*-
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if type(v) is dict:
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
<commit_msg>Use isinstance check so library can be used for more types<commit_after># -*- coding: UTF-8 -*-
import collections
__version__ = '0.1.0'
def flatkeys(d, sep="."):
"""
Flatten a dictionary: build a new dictionary from a given one where all
non-dict values are left untouched but nested ``dict``s are recursively
merged in the new one with their keys prefixed by their parent key.
>>> flatkeys({1: 42, 'foo': 12})
{1: 42, 'foo': 12}
>>> flatkeys({1: 42, 'foo': 12, 'bar': {'qux': True}})
{1: 42, 'foo': 12, 'bar.qux': True}
>>> flatkeys({1: {2: {3: 4}}})
{'1.2.3': 4}
>>> flatkeys({1: {2: {3: 4}, 5: 6}})
{'1.2.3': 4, '1.5': 6}
"""
flat = {}
dicts = [("", d)]
while dicts:
prefix, d = dicts.pop()
for k, v in d.items():
k_s = str(k)
if isinstance(v, collections.Mapping):
dicts.append(("%s%s%s" % (prefix, k_s, sep), v))
else:
k_ = prefix + k_s if prefix else k
flat[k_] = v
return flat
|
df04444b3932f7481562dde62c7ae1f8ffb8bd7e | webapp/controllers/contact.py | webapp/controllers/contact.py | # -*- coding: utf-8 -*-
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
def index():
return dict()
| # -*- coding: utf-8 -*-
from opentreewebapputil import (get_opentree_services_method_urls,
fetch_current_TNRS_context_names)
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
default_view_dict = get_opentree_services_method_urls(request)
default_view_dict['taxonSearchContextNames'] = fetch_current_TNRS_context_names(request)
def index():
return default_view_dict
| Fix missing search-context list on Contact page. | Fix missing search-context list on Contact page.
| Python | bsd-2-clause | OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree | # -*- coding: utf-8 -*-
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
def index():
return dict()
Fix missing search-context list on Contact page. | # -*- coding: utf-8 -*-
from opentreewebapputil import (get_opentree_services_method_urls,
fetch_current_TNRS_context_names)
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
default_view_dict = get_opentree_services_method_urls(request)
default_view_dict['taxonSearchContextNames'] = fetch_current_TNRS_context_names(request)
def index():
return default_view_dict
| <commit_before># -*- coding: utf-8 -*-
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
def index():
return dict()
<commit_msg>Fix missing search-context list on Contact page.<commit_after> | # -*- coding: utf-8 -*-
from opentreewebapputil import (get_opentree_services_method_urls,
fetch_current_TNRS_context_names)
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
default_view_dict = get_opentree_services_method_urls(request)
default_view_dict['taxonSearchContextNames'] = fetch_current_TNRS_context_names(request)
def index():
return default_view_dict
| # -*- coding: utf-8 -*-
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
def index():
return dict()
Fix missing search-context list on Contact page.# -*- coding: utf-8 -*-
from opentreewebapputil import (get_opentree_services_method_urls,
fetch_current_TNRS_context_names)
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
default_view_dict = get_opentree_services_method_urls(request)
default_view_dict['taxonSearchContextNames'] = fetch_current_TNRS_context_names(request)
def index():
return default_view_dict
| <commit_before># -*- coding: utf-8 -*-
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
def index():
return dict()
<commit_msg>Fix missing search-context list on Contact page.<commit_after># -*- coding: utf-8 -*-
from opentreewebapputil import (get_opentree_services_method_urls,
fetch_current_TNRS_context_names)
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
default_view_dict = get_opentree_services_method_urls(request)
default_view_dict['taxonSearchContextNames'] = fetch_current_TNRS_context_names(request)
def index():
return default_view_dict
|
d4607cec7bc4fb4bcfc601fc8f1c35ea93131d3d | generate_c_arrays.py | generate_c_arrays.py | """Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main() | """Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color, ear=side)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "unsigned int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main() | Fix for generate colors py | Fix for generate colors py
| Python | mit | evilsoapbox/GlowColors | """Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main()Fix for generate colors py | """Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color, ear=side)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "unsigned int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main() | <commit_before>"""Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main()<commit_msg>Fix for generate colors py<commit_after> | """Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color, ear=side)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "unsigned int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main() | """Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main()Fix for generate colors py"""Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color, ear=side)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "unsigned int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main() | <commit_before>"""Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main()<commit_msg>Fix for generate colors py<commit_after>"""Generates color arrays for use in a C project.
This scipt generates a dump of array/color data for use in a A project. This simplifies the need
to hand-create and edit the data in many projects. This is useful, for instance, with Arduino projects.
"""
# Module imports
import glowcolors.encode
import glowcolors.message
import glowcolors.settings
# Constants
## NONE
# Functions
def main():
print "--> Generating arrays fror Arduino use <--\n"
# Generare both color arrays
generate_color_arrays('BOTH')
# Generate right color arrays
generate_color_arrays('RIGHT')
# Translate a python array into a string for Arduino
def array_to_string(array):
array_string = ','.join(str(x) for x in array)
return '{' + array_string + '}'
# Generate arrays for a given side
def generate_color_arrays(side):
# Array to hold encoded data
encoded_data = []
for color in glowcolors.settings.COLORS[side]:
message = glowcolors.message.generate(color, ear=side)
encoded_data = glowcolors.encode.ir_encode(message, True)
print "unsigned int %s_%s[%s] = %s" % (side, color, len(encoded_data), array_to_string(encoded_data))
# Helpers
## Run the main() function on execution
if __name__ == "__main__":
main() |
721f9d02645ba91c542e9eba243ddb617db0975e | Steamworks.NET_CodeGen.py | Steamworks.NET_CodeGen.py | import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
| import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
| Add typedef generation to the main script | Add typedef generation to the main script
| Python | mit | rlabrecque/Steamworks.NET-CodeGen,rlabrecque/Steamworks.NET-CodeGen,rlabrecque/Steamworks.NET-CodeGen,rlabrecque/Steamworks.NET-CodeGen | import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
Add typedef generation to the main script | import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
| <commit_before>import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
<commit_msg>Add typedef generation to the main script<commit_after> | import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
| import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
Add typedef generation to the main scriptimport sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
| <commit_before>import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
<commit_msg>Add typedef generation to the main script<commit_after>import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
|
555d441053731957f9648e835e4dbbd686f2f7e5 | whack/operations.py | whack/operations.py | import os
from catchy import HttpCacher, DirectoryCacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
| import os
from catchy import HttpCacher, xdg_directory_cacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = xdg_directory_cacher("whack/builds")
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
| Use XDG directory for caching | Use XDG directory for caching
| Python | bsd-2-clause | mwilliamson/whack | import os
from catchy import HttpCacher, DirectoryCacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
Use XDG directory for caching | import os
from catchy import HttpCacher, xdg_directory_cacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = xdg_directory_cacher("whack/builds")
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
| <commit_before>import os
from catchy import HttpCacher, DirectoryCacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
<commit_msg>Use XDG directory for caching<commit_after> | import os
from catchy import HttpCacher, xdg_directory_cacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = xdg_directory_cacher("whack/builds")
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
| import os
from catchy import HttpCacher, DirectoryCacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
Use XDG directory for cachingimport os
from catchy import HttpCacher, xdg_directory_cacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = xdg_directory_cacher("whack/builds")
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
| <commit_before>import os
from catchy import HttpCacher, DirectoryCacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
<commit_msg>Use XDG directory for caching<commit_after>import os
from catchy import HttpCacher, xdg_directory_cacher, NoCachingStrategy
from .installer import Installer
from .sources import PackageSourceFetcher
from .providers import CachingPackageProvider
from .deployer import PackageDeployer
def create(caching):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = xdg_directory_cacher("whack/builds")
package_source_fetcher = PackageSourceFetcher()
package_provider = CachingPackageProvider(cacher)
deployer = PackageDeployer()
installer = Installer(package_source_fetcher, package_provider, deployer)
return Operations(installer)
class Operations(object):
def __init__(self, installer):
self._installer = installer
def install(self, package_name, install_dir, params):
return self._installer.install(package_name, install_dir, params)
def build(self, package_name, install_dir, params):
return self._installer.build(package_name, install_dir, params)
def install(package, install_dir, caching, params):
operations = create(caching)
operations.install(package, install_dir, params)
def build(command, package, install_dir, caching, params):
operations = create(caching)
operations.build(package, install_dir, params)
|
504bd32e2f91b074595e1b86065eed318a22da8c | gitcommitautosave.py | gitcommitautosave.py | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
| """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG', 'addp-hunk-edit.diff')
if path and any(path.endswith(name) for name in git_files):
return True
| Add file created when using add -p | Add file created when using add -p
When using 'git add -p' or 'git checkout -p', if the user chooses
to manually edit the .diff file, Git creates a file called
'addp-hunk-edit.diff' (same file name for both operations).
| Python | mit | aristidesfl/sublime-git-commit-message-auto-save | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
Add file created when using add -p
When using 'git add -p' or 'git checkout -p', if the user chooses
to manually edit the .diff file, Git creates a file called
'addp-hunk-edit.diff' (same file name for both operations). | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG', 'addp-hunk-edit.diff')
if path and any(path.endswith(name) for name in git_files):
return True
| <commit_before>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
<commit_msg>Add file created when using add -p
When using 'git add -p' or 'git checkout -p', if the user chooses
to manually edit the .diff file, Git creates a file called
'addp-hunk-edit.diff' (same file name for both operations).<commit_after> | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG', 'addp-hunk-edit.diff')
if path and any(path.endswith(name) for name in git_files):
return True
| """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
Add file created when using add -p
When using 'git add -p' or 'git checkout -p', if the user chooses
to manually edit the .diff file, Git creates a file called
'addp-hunk-edit.diff' (same file name for both operations)."""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG', 'addp-hunk-edit.diff')
if path and any(path.endswith(name) for name in git_files):
return True
| <commit_before>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG')
if path and any(path.endswith(name) for name in git_files):
return True
<commit_msg>Add file created when using add -p
When using 'git add -p' or 'git checkout -p', if the user chooses
to manually edit the .diff file, Git creates a file called
'addp-hunk-edit.diff' (same file name for both operations).<commit_after>"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG', 'PULLREQ_EDITMSG', 'addp-hunk-edit.diff')
if path and any(path.endswith(name) for name in git_files):
return True
|
ac1f9ab2cb06be4060100fd8c0714e26a9e5c970 | openacademy/model/openacademy_course.py | openacademy/model/openacademy_course.py | from openerp import fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
| from openerp import api,fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| Modify copy method into inherit | [REF] openacademy: Modify copy method into inherit
| Python | apache-2.0 | hellomoto6/openacademy | from openerp import fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
[REF] openacademy: Modify copy method into inherit | from openerp import api,fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| <commit_before>from openerp import fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
<commit_msg>[REF] openacademy: Modify copy method into inherit<commit_after> | from openerp import api,fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| from openerp import fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
[REF] openacademy: Modify copy method into inheritfrom openerp import api,fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| <commit_before>from openerp import fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
<commit_msg>[REF] openacademy: Modify copy method into inherit<commit_after>from openerp import api,fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
|
2548ecd64d6a26b09fe79f5a369f731c66410aa0 | dadd/master/admin.py | dadd/master/admin.py | from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ModelView(models.Process, session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
| from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
class ProcessModelView(ModelView):
# Make the latest first
column_default_sort = ('start_time', True)
def __init__(self, session):
super(ProcessModelView, self).__init__(models.Process, session)
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ProcessModelView(session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
| Sort the processes by start time by default in the Admin. | Sort the processes by start time by default in the Admin.
| Python | bsd-3-clause | ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd | from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ModelView(models.Process, session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
Sort the processes by start time by default in the Admin. | from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
class ProcessModelView(ModelView):
# Make the latest first
column_default_sort = ('start_time', True)
def __init__(self, session):
super(ProcessModelView, self).__init__(models.Process, session)
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ProcessModelView(session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
| <commit_before>from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ModelView(models.Process, session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
<commit_msg>Sort the processes by start time by default in the Admin.<commit_after> | from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
class ProcessModelView(ModelView):
# Make the latest first
column_default_sort = ('start_time', True)
def __init__(self, session):
super(ProcessModelView, self).__init__(models.Process, session)
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ProcessModelView(session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
| from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ModelView(models.Process, session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
Sort the processes by start time by default in the Admin.from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
class ProcessModelView(ModelView):
# Make the latest first
column_default_sort = ('start_time', True)
def __init__(self, session):
super(ProcessModelView, self).__init__(models.Process, session)
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ProcessModelView(session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
| <commit_before>from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ModelView(models.Process, session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
<commit_msg>Sort the processes by start time by default in the Admin.<commit_after>from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from dadd.master import models
class ProcessModelView(ModelView):
# Make the latest first
column_default_sort = ('start_time', True)
def __init__(self, session):
super(ProcessModelView, self).__init__(models.Process, session)
def admin(app):
admin = Admin(app)
session = models.db.session
admin.add_view(ProcessModelView(session))
admin.add_view(ModelView(models.Host, session))
admin.add_view(ModelView(models.Logfile, session))
|
1fdcb2a94f4af2357c24d528ecdba356074d4101 | database_setup.py | database_setup.py | import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('app/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
| import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('meetup_facebook_bot/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
| Fix the path to fake database data | Fix the path to fake database data
| Python | mit | Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot | import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('app/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
Fix the path to fake database data | import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('meetup_facebook_bot/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
| <commit_before>import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('app/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
<commit_msg>Fix the path to fake database data<commit_after> | import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('meetup_facebook_bot/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
| import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('app/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
Fix the path to fake database dataimport json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('meetup_facebook_bot/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
| <commit_before>import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('app/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
<commit_msg>Fix the path to fake database data<commit_after>import json
from meetup_facebook_bot import server
from meetup_facebook_bot.models import base, talk, speaker
base.Base.metadata.create_all(bind=server.engine)
session = server.Session()
# This part of the script provides the app with mockup data
# TODO: replace it with actually working method
json_talks = []
with open('meetup_facebook_bot/example_talks.json') as json_file:
json_talks = json.load(json_file)
for fake_facebook_id, json_talk in enumerate(json_talks):
fake_speaker = speaker.Speaker(facebook_id=fake_facebook_id, name=json_talk['speaker'])
fake_talk = talk.Talk(
title=json_talk['title'],
description=json_talk['description'],
speaker_facebook_id=fake_speaker.facebook_id
)
session.add(fake_speaker)
session.add(fake_talk)
session.commit()
print('DB created!')
|
f5c5c9297bb5c7dfa0ee6c3077329c8e7fa00b06 | talon_one/exceptions.py | talon_one/exceptions.py | import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
super(TalonOneAPIError, self).__init__(self.message, *args)
| import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
if "message" in hints:
self.message += " - %s" % hints["message"]
super(TalonOneAPIError, self).__init__(self.message, *args)
| Include API validation response into `TalonOneAPIError` details | Include API validation response into `TalonOneAPIError` details
| Python | mit | talon-one/talon_one.py,talon-one/talon_one.py | import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
super(TalonOneAPIError, self).__init__(self.message, *args)
Include API validation response into `TalonOneAPIError` details | import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
if "message" in hints:
self.message += " - %s" % hints["message"]
super(TalonOneAPIError, self).__init__(self.message, *args)
| <commit_before>import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
super(TalonOneAPIError, self).__init__(self.message, *args)
<commit_msg>Include API validation response into `TalonOneAPIError` details<commit_after> | import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
if "message" in hints:
self.message += " - %s" % hints["message"]
super(TalonOneAPIError, self).__init__(self.message, *args)
| import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
super(TalonOneAPIError, self).__init__(self.message, *args)
Include API validation response into `TalonOneAPIError` detailsimport json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
if "message" in hints:
self.message += " - %s" % hints["message"]
super(TalonOneAPIError, self).__init__(self.message, *args)
| <commit_before>import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
super(TalonOneAPIError, self).__init__(self.message, *args)
<commit_msg>Include API validation response into `TalonOneAPIError` details<commit_after>import json
import requests
class TalonOneAPIError(Exception):
"""
TalonOne API Exceptions
"""
def __init__(self, message, *args):
self.message = message
# try to enhance with detailed error from API
if len(args) > 0 and isinstance(args[0], requests.exceptions.HTTPError):
hints = json.loads(args[0].response.text)
if "errors" in hints:
self.message += " - %s" % hints["errors"][0]["title"]
if "message" in hints:
self.message += " - %s" % hints["message"]
super(TalonOneAPIError, self).__init__(self.message, *args)
|
30e050b836a4e8df3f918738b78552a070a1a176 | jarbas/core/views.py | jarbas/core/views.py | from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
| from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
| Add blank line - pep8 | Add blank line - pep8
| Python | mit | datasciencebr/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/jarbas,datasciencebr/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/jarbas | from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
Add blank line - pep8 | from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
| <commit_before>from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
<commit_msg>Add blank line - pep8<commit_after> | from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
| from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
Add blank line - pep8from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
| <commit_before>from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
<commit_msg>Add blank line - pep8<commit_after>from django.shortcuts import get_object_or_404
from rest_framework.generics import RetrieveAPIView
from jarbas.core.models import Company
from jarbas.core.serializers import CompanySerializer
from jarbas.chamber_of_deputies.serializers import format_cnpj
class CompanyDetailView(RetrieveAPIView):
lookup_field = 'cnpj'
queryset = Company.objects.all()
serializer_class = CompanySerializer
def get_object(self):
cnpj = self.kwargs.get(self.lookup_field, '00000000000000')
return get_object_or_404(Company, cnpj=format_cnpj(cnpj))
|
6fecc53b63023e6d25722aa66038285be3b4d46b | arcutils/response.py | arcutils/response.py | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
    """Attempt to choose an optimal redirect location.
    If a location is specified via a request parameter, that location
    will be used.
    If a location is specified via POST or PUT data, that location will
    be used.
    In either of the above two cases, the name of the parameter and data
    field is specified by ``redirect_field_name``, which defaults to
    "next".
    Otherwise, the preferred option is to redirect back to the referring
    page.
    If there's no referrer, the default is used.
    In any case, the redirect location must be safe (same host, safe
    scheme). Otherwise, the ``default`` location will be used. If the
    default location isn't safe, "/" will be used as a last resort.
    """
    host = request.get_host()
    # Precedence: query parameter, then form data, then the Referer
    # header. ``or`` skips empty strings as well as missing values.
    location = (
        request.GET.get(redirect_field_name) or
        request.POST.get(redirect_field_name) or
        request.META.get('HTTP_REFERER')
    )
    if not is_safe_url(location, host):
        # Candidate is unsafe (or absent): fall back to ``default``, and
        # to "/" if the caller-supplied default is itself unsafe.
        default = default or '/'
        if not is_safe_url(default, host):
            default = '/'
        location = default
    return location
| from urllib.parse import urlparse, urlunparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
    """Pick the best safe redirect target for ``request``.

    Order of preference: the ``redirect_field_name`` value from the query
    string or request body, then the Referer header, then ``default``
    (with "/" as the last resort). The chosen location must pass
    ``is_safe_url`` for this host. A same-host referrer is reduced to its
    path (plus params, query and fragment) so the redirect stays relative.
    """
    host = request.get_host()
    explicit = request.GET.get(redirect_field_name) or request.POST.get(redirect_field_name)
    if explicit:
        location, from_referrer = explicit, False
    else:
        referrer = request.META.get('HTTP_REFERER')
        location, from_referrer = referrer, bool(referrer)
    if not is_safe_url(location, host):
        # Unsafe or missing candidate: use the default, guarding against
        # an unsafe default as well.
        fallback = default or '/'
        if not is_safe_url(fallback, host):
            fallback = '/'
        location = fallback
    elif from_referrer:
        parts = urlparse(location)
        if parts.netloc == host:
            # Drop the scheme and netloc, keep the path (forced non-empty
            # for consistency) plus params, query and fragment.
            location = urlunparse(('', '', parts.path or '/') + parts[3:])
    return location
| Return just path when getting redirect location from REFERER | Return just path when getting redirect location from REFERER
In response.get_redirect_location(). There's no need to include the
scheme and host in a redirect back to the same site. Removing them makes
redirect URLs more concise.
| Python | mit | PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils,wylee/django-arcutils | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = (
request.GET.get(redirect_field_name) or
request.POST.get(redirect_field_name) or
request.META.get('HTTP_REFERER')
)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
return location
Return just path when getting redirect location from REFERER
In response.get_redirect_location(). There's no need to include the
scheme and host in a redirect back to the same site. Removing them makes
redirect URLs more concise. | from urllib.parse import urlparse, urlunparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = request.GET.get(redirect_field_name) or request.POST.get(redirect_field_name)
if location:
from_referrer = False
else:
location = request.META.get('HTTP_REFERER')
from_referrer = bool(location)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
elif from_referrer:
info = urlparse(location)
if info.netloc == host:
# Clear scheme and host (AKA netloc) to get just the path of
# the referrer. Also, ensure the path is set for consistency.
new_info = ('', '', info.path or '/') + info[3:]
location = urlunparse(new_info)
return location
| <commit_before>from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = (
request.GET.get(redirect_field_name) or
request.POST.get(redirect_field_name) or
request.META.get('HTTP_REFERER')
)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
return location
<commit_msg>Return just path when getting redirect location from REFERER
In response.get_redirect_location(). There's no need to include the
scheme and host in a redirect back to the same site. Removing them makes
redirect URLs more concise.<commit_after> | from urllib.parse import urlparse, urlunparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = request.GET.get(redirect_field_name) or request.POST.get(redirect_field_name)
if location:
from_referrer = False
else:
location = request.META.get('HTTP_REFERER')
from_referrer = bool(location)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
elif from_referrer:
info = urlparse(location)
if info.netloc == host:
# Clear scheme and host (AKA netloc) to get just the path of
# the referrer. Also, ensure the path is set for consistency.
new_info = ('', '', info.path or '/') + info[3:]
location = urlunparse(new_info)
return location
| from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = (
request.GET.get(redirect_field_name) or
request.POST.get(redirect_field_name) or
request.META.get('HTTP_REFERER')
)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
return location
Return just path when getting redirect location from REFERER
In response.get_redirect_location(). There's no need to include the
scheme and host in a redirect back to the same site. Removing them makes
redirect URLs more concise.from urllib.parse import urlparse, urlunparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = request.GET.get(redirect_field_name) or request.POST.get(redirect_field_name)
if location:
from_referrer = False
else:
location = request.META.get('HTTP_REFERER')
from_referrer = bool(location)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
elif from_referrer:
info = urlparse(location)
if info.netloc == host:
# Clear scheme and host (AKA netloc) to get just the path of
# the referrer. Also, ensure the path is set for consistency.
new_info = ('', '', info.path or '/') + info[3:]
location = urlunparse(new_info)
return location
| <commit_before>from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = (
request.GET.get(redirect_field_name) or
request.POST.get(redirect_field_name) or
request.META.get('HTTP_REFERER')
)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
return location
<commit_msg>Return just path when getting redirect location from REFERER
In response.get_redirect_location(). There's no need to include the
scheme and host in a redirect back to the same site. Removing them makes
redirect URLs more concise.<commit_after>from urllib.parse import urlparse, urlunparse
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.http import is_safe_url
def get_redirect_location(request, redirect_field_name=REDIRECT_FIELD_NAME, default='/'):
"""Attempt to choose an optimal redirect location.
If a location is specified via a request parameter, that location
will be used.
If a location is specified via POST or PUT data, that location will
be used.
In either of the above two cases, the name of the parameter and data
field is specified by ``redirect_field_name``, which defaults to
"next".
Otherwise, the preferred option is to redirect back to the referring
page.
If there's no referrer, the default is used.
In any case, the redirect location must be safe (same host, safe
scheme). Otherwise, the ``default`` location will be used. If the
default location isn't safe, "/" will be used as a last resort.
"""
host = request.get_host()
location = request.GET.get(redirect_field_name) or request.POST.get(redirect_field_name)
if location:
from_referrer = False
else:
location = request.META.get('HTTP_REFERER')
from_referrer = bool(location)
if not is_safe_url(location, host):
default = default or '/'
if not is_safe_url(default, host):
default = '/'
location = default
elif from_referrer:
info = urlparse(location)
if info.netloc == host:
# Clear scheme and host (AKA netloc) to get just the path of
# the referrer. Also, ensure the path is set for consistency.
new_info = ('', '', info.path or '/') + info[3:]
location = urlunparse(new_info)
return location
|
9d8b648163ede522eed6d9b742e8e7393dc2f6dc | vispy/testing/__init__.py | vispy/testing/__init__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
| # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Testing
=======
This module provides functions useful for running tests in vispy.
Tests can be run in a few ways:
* From Python, you can import ``vispy`` and do ``vispy.test()``.
* From the source root, you can do ``make test`` which wraps to
a call to ``python make test``.
There are various different testing "modes", including:
* "full": run all tests.
* any backend name (e.g., "glfw"): run application/GL tests using a
specific backend.
* "nobackend": run tests that do not require a backend.
* "examples": run repo examples to check for errors and warnings.
* "flake": check style errors.
Examples get automatically tested unless they have a special comment toward
the top ``# vispy: testskip``. Examples that should be tested should be
formatted so that 1) a ``Canvas`` class is defined, or a ``canvas`` class
is instantiated; and 2) the ``app.run()`` call is protected by a check
if ``__name__ == "__main__"``. This makes it so that the event loop is not
started when running examples in the test suite -- the test suite instead
manually updates the canvas (using ``app.process_events()``) for under one
second to ensure that things like timer events are processed.
For examples on how to test various bits of functionality (e.g., application
functionality, or drawing things with OpenGL), it's best to look at existing
examples in the test suite.
The code base gets automatically tested by Travis-CI (Linux) and AppVeyor
(Windows) on Python 2.6, 2.7, 3.4. There are multiple testing modes that
use e.g. full dependencies, minimal dependencies, etc. See ``.travis.yml``
to determine what automatic tests are run.
"""
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
| Document test system a bit | DOC: Document test system a bit
| Python | bsd-3-clause | Eric89GXL/vispy,michaelaye/vispy,Eric89GXL/vispy,drufat/vispy,dchilds7/Deysha-Star-Formation,bollu/vispy,jdreaver/vispy,jdreaver/vispy,bollu/vispy,sbtlaarzc/vispy,jay3sh/vispy,inclement/vispy,sh4wn/vispy,drufat/vispy,hronoses/vispy,bollu/vispy,julienr/vispy,ghisvail/vispy,sbtlaarzc/vispy,kkuunnddaannkk/vispy,sh4wn/vispy,sh4wn/vispy,jay3sh/vispy,Eric89GXL/vispy,QuLogic/vispy,michaelaye/vispy,ghisvail/vispy,RebeccaWPerry/vispy,QuLogic/vispy,drufat/vispy,ghisvail/vispy,srinathv/vispy,hronoses/vispy,jdreaver/vispy,jay3sh/vispy,sbtlaarzc/vispy,julienr/vispy,RebeccaWPerry/vispy,michaelaye/vispy,inclement/vispy,dchilds7/Deysha-Star-Formation,julienr/vispy,kkuunnddaannkk/vispy,srinathv/vispy,RebeccaWPerry/vispy,srinathv/vispy,inclement/vispy,kkuunnddaannkk/vispy,QuLogic/vispy,dchilds7/Deysha-Star-Formation,hronoses/vispy | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
DOC: Document test system a bit | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Testing
=======
This module provides functions useful for running tests in vispy.
Tests can be run in a few ways:
* From Python, you can import ``vispy`` and do ``vispy.test()``.
* From the source root, you can do ``make test`` which wraps to
a call to ``python make test``.
There are various different testing "modes", including:
* "full": run all tests.
* any backend name (e.g., "glfw"): run application/GL tests using a
specific backend.
* "nobackend": run tests that do not require a backend.
* "examples": run repo examples to check for errors and warnings.
* "flake": check style errors.
Examples get automatically tested unless they have a special comment toward
the top ``# vispy: testskip``. Examples that should be tested should be
formatted so that 1) a ``Canvas`` class is defined, or a ``canvas`` class
is instantiated; and 2) the ``app.run()`` call is protected by a check
if ``__name__ == "__main__"``. This makes it so that the event loop is not
started when running examples in the test suite -- the test suite instead
manually updates the canvas (using ``app.process_events()``) for under one
second to ensure that things like timer events are processed.
For examples on how to test various bits of functionality (e.g., application
functionality, or drawing things with OpenGL), it's best to look at existing
examples in the test suite.
The code base gets automatically tested by Travis-CI (Linux) and AppVeyor
(Windows) on Python 2.6, 2.7, 3.4. There are multiple testing modes that
use e.g. full dependencies, minimal dependencies, etc. See ``.travis.yml``
to determine what automatic tests are run.
"""
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
<commit_msg>DOC: Document test system a bit<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Testing
=======
This module provides functions useful for running tests in vispy.
Tests can be run in a few ways:
* From Python, you can import ``vispy`` and do ``vispy.test()``.
* From the source root, you can do ``make test`` which wraps to
a call to ``python make test``.
There are various different testing "modes", including:
* "full": run all tests.
* any backend name (e.g., "glfw"): run application/GL tests using a
specific backend.
* "nobackend": run tests that do not require a backend.
* "examples": run repo examples to check for errors and warnings.
* "flake": check style errors.
Examples get automatically tested unless they have a special comment toward
the top ``# vispy: testskip``. Examples that should be tested should be
formatted so that 1) a ``Canvas`` class is defined, or a ``canvas`` class
is instantiated; and 2) the ``app.run()`` call is protected by a check
if ``__name__ == "__main__"``. This makes it so that the event loop is not
started when running examples in the test suite -- the test suite instead
manually updates the canvas (using ``app.process_events()``) for under one
second to ensure that things like timer events are processed.
For examples on how to test various bits of functionality (e.g., application
functionality, or drawing things with OpenGL), it's best to look at existing
examples in the test suite.
The code base gets automatically tested by Travis-CI (Linux) and AppVeyor
(Windows) on Python 2.6, 2.7, 3.4. There are multiple testing modes that
use e.g. full dependencies, minimal dependencies, etc. See ``.travis.yml``
to determine what automatic tests are run.
"""
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
| # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
DOC: Document test system a bit# -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Testing
=======
This module provides functions useful for running tests in vispy.
Tests can be run in a few ways:
* From Python, you can import ``vispy`` and do ``vispy.test()``.
* From the source root, you can do ``make test`` which wraps to
a call to ``python make test``.
There are various different testing "modes", including:
* "full": run all tests.
* any backend name (e.g., "glfw"): run application/GL tests using a
specific backend.
* "nobackend": run tests that do not require a backend.
* "examples": run repo examples to check for errors and warnings.
* "flake": check style errors.
Examples get automatically tested unless they have a special comment toward
the top ``# vispy: testskip``. Examples that should be tested should be
formatted so that 1) a ``Canvas`` class is defined, or a ``canvas`` class
is instantiated; and 2) the ``app.run()`` call is protected by a check
if ``__name__ == "__main__"``. This makes it so that the event loop is not
started when running examples in the test suite -- the test suite instead
manually updates the canvas (using ``app.process_events()``) for under one
second to ensure that things like timer events are processed.
For examples on how to test various bits of functionality (e.g., application
functionality, or drawing things with OpenGL), it's best to look at existing
examples in the test suite.
The code base gets automatically tested by Travis-CI (Linux) and AppVeyor
(Windows) on Python 2.6, 2.7, 3.4. There are multiple testing modes that
use e.g. full dependencies, minimal dependencies, etc. See ``.travis.yml``
to determine what automatic tests are run.
"""
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
<commit_msg>DOC: Document test system a bit<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Testing
=======
This module provides functions useful for running tests in vispy.
Tests can be run in a few ways:
* From Python, you can import ``vispy`` and do ``vispy.test()``.
* From the source root, you can do ``make test`` which wraps to
a call to ``python make test``.
There are various different testing "modes", including:
* "full": run all tests.
* any backend name (e.g., "glfw"): run application/GL tests using a
specific backend.
* "nobackend": run tests that do not require a backend.
* "examples": run repo examples to check for errors and warnings.
* "flake": check style errors.
Examples get automatically tested unless they have a special comment toward
the top ``# vispy: testskip``. Examples that should be tested should be
formatted so that 1) a ``Canvas`` class is defined, or a ``canvas`` class
is instantiated; and 2) the ``app.run()`` call is protected by a check
if ``__name__ == "__main__"``. This makes it so that the event loop is not
started when running examples in the test suite -- the test suite instead
manually updates the canvas (using ``app.process_events()``) for under one
second to ensure that things like timer events are processed.
For examples on how to test various bits of functionality (e.g., application
functionality, or drawing things with OpenGL), it's best to look at existing
examples in the test suite.
The code base gets automatically tested by Travis-CI (Linux) and AppVeyor
(Windows) on Python 2.6, 2.7, 3.4. There are multiple testing modes that
use e.g. full dependencies, minimal dependencies, etc. See ``.travis.yml``
to determine what automatic tests are run.
"""
from ._testing import (SkipTest, requires_application, requires_img_lib, # noqa
has_backend, requires_pyopengl, # noqa
requires_scipy, has_matplotlib, # noqa
save_testing_image, TestingCanvas, has_pyopengl, # noqa
run_tests_if_main, assert_image_equal,
assert_is, assert_in, assert_not_in, assert_equal,
assert_not_equal, assert_raises, assert_true) # noqa
from ._runners import test # noqa
|
92a44ab657cf56dc6effd4fa841187d88ead879b | piper/__init__.py | piper/__init__.py | import sys
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
piper.setup()
piper.execute()
| import sys
import jsonschema
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
try:
piper.setup()
piper.execute()
except jsonschema.exceptions.ValidationError as e:
print(e)
raise
| Implement very simple main executor | Implement very simple main executor
| Python | mit | thiderman/piper | import sys
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
piper.setup()
piper.execute()
Implement very simple main executor | import sys
import jsonschema
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
try:
piper.setup()
piper.execute()
except jsonschema.exceptions.ValidationError as e:
print(e)
raise
| <commit_before>import sys
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
piper.setup()
piper.execute()
<commit_msg>Implement very simple main executor<commit_after> | import sys
import jsonschema
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
try:
piper.setup()
piper.execute()
except jsonschema.exceptions.ValidationError as e:
print(e)
raise
| import sys
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
piper.setup()
piper.execute()
Implement very simple main executorimport sys
import jsonschema
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
try:
piper.setup()
piper.execute()
except jsonschema.exceptions.ValidationError as e:
print(e)
raise
| <commit_before>import sys
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
piper.setup()
piper.execute()
<commit_msg>Implement very simple main executor<commit_after>import sys
import jsonschema
from piper.core import Piper
def main():
# TODO: dat argparse
env_key = sys.argv[1]
set_key = sys.argv[2]
piper = Piper(env_key, set_key)
try:
piper.setup()
piper.execute()
except jsonschema.exceptions.ValidationError as e:
print(e)
raise
|
e4297f0f7149763f6e93536746e3c87f9d1fa699 | tests/test_watchmedo.py | tests/test_watchmedo.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
import yaml
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
with pytest.raises(yaml.constructor.ConstructorError):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
from yaml.constructor import ConstructorError
from yaml.scanner import ScannerError
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
# PyYAML get_single_data() raises different exceptions for Linux and Windows
with pytest.raises((ConstructorError, ScannerError)):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
| Fix watchmedo tests in Windows | Fix watchmedo tests in Windows
Unexpectedly, but on test_load_config_invalid running, PyYAML get_single_data() raises different execptions for Linux and Windows. This PR adds ScannerError for passing tests under Windows. | Python | apache-2.0 | gorakhargosh/watchdog,gorakhargosh/watchdog | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
import yaml
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
with pytest.raises(yaml.constructor.ConstructorError):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
Fix watchmedo tests in Windows
Unexpectedly, but on test_load_config_invalid running, PyYAML get_single_data() raises different execptions for Linux and Windows. This PR adds ScannerError for passing tests under Windows. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
from yaml.constructor import ConstructorError
from yaml.scanner import ScannerError
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
# PyYAML get_single_data() raises different exceptions for Linux and Windows
with pytest.raises((ConstructorError, ScannerError)):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
import yaml
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
with pytest.raises(yaml.constructor.ConstructorError):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
<commit_msg>Fix watchmedo tests in Windows
Unexpectedly, but on test_load_config_invalid running, PyYAML get_single_data() raises different execptions for Linux and Windows. This PR adds ScannerError for passing tests under Windows.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
from yaml.constructor import ConstructorError
from yaml.scanner import ScannerError
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
# PyYAML get_single_data() raises different exceptions for Linux and Windows
with pytest.raises((ConstructorError, ScannerError)):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
import yaml
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
with pytest.raises(yaml.constructor.ConstructorError):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
Fix watchmedo tests in Windows
Unexpectedly, but on test_load_config_invalid running, PyYAML get_single_data() raises different execptions for Linux and Windows. This PR adds ScannerError for passing tests under Windows.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
from yaml.constructor import ConstructorError
from yaml.scanner import ScannerError
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
# PyYAML get_single_data() raises different exceptions for Linux and Windows
with pytest.raises((ConstructorError, ScannerError)):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
import yaml
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
with pytest.raises(yaml.constructor.ConstructorError):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
<commit_msg>Fix watchmedo tests in Windows
Unexpectedly, but on test_load_config_invalid running, PyYAML get_single_data() raises different execptions for Linux and Windows. This PR adds ScannerError for passing tests under Windows.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from watchdog import watchmedo
import pytest
from yaml.constructor import ConstructorError
from yaml.scanner import ScannerError
import os
def test_load_config_valid(tmpdir):
"""Verifies the load of a valid yaml file"""
yaml_file = os.path.join(tmpdir, 'config_file.yaml')
with open(yaml_file, 'w') as f:
f.write('one: value\ntwo:\n- value1\n- value2\n')
config = watchmedo.load_config(yaml_file)
assert isinstance(config, dict)
assert 'one' in config
assert 'two' in config
assert isinstance(config['two'], list)
assert config['one'] == 'value'
assert config['two'] == ['value1', 'value2']
def test_load_config_invalid(tmpdir):
"""Verifies if safe load avoid the execution
of untrusted code inside yaml files"""
critical_dir = os.path.join(tmpdir, 'critical')
yaml_file = os.path.join(tmpdir, 'tricks_file.yaml')
with open(yaml_file, 'w') as f:
content = (
'one: value\n'
'run: !!python/object/apply:os.system ["mkdir {}"]\n'
).format(critical_dir)
f.write(content)
# PyYAML get_single_data() raises different exceptions for Linux and Windows
with pytest.raises((ConstructorError, ScannerError)):
watchmedo.load_config(yaml_file)
assert not os.path.exists(critical_dir)
|
5076055b54d18ea2441abaf604a4ea4dd79353c5 | cybox/test/objects/__init__.py | cybox/test/objects/__init__.py | import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
| import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
| Expand default testing on new object types | Expand default testing on new object types
| Python | bsd-3-clause | CybOXProject/python-cybox | import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
Expand default testing on new object types | import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
| <commit_before>import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
<commit_msg>Expand default testing on new object types<commit_after> | import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
| import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
Expand default testing on new object typesimport cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
| <commit_before>import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECTS
# dictionary in cybox.utils
print(type(self))
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
c = self.__class__.klass
self.assertEqual(cybox.utils.get_class_for_object_type(t), c)
<commit_msg>Expand default testing on new object types<commit_after>import cybox.utils
class ObjectTestCase(object):
"""A base class for testing all subclasses of ObjectProperties.
Each subclass of ObjectTestCase should subclass both unittest.TestCase
and ObjectTestCase, and defined two class-level fields:
- klass: the ObjectProperties subclass being tested
- object_type: The name prefix used in the XML Schema bindings for the
object.
"""
def test_type_exists(self):
# Verify that the correct class has been added to the OBJECT_TYPES_DICT
# dictionary in cybox.utils.nsparser
# Skip this base class
if type(self) == type(ObjectTestCase):
return
t = self.__class__.object_type
expected_class = cybox.utils.get_class_for_object_type(t)
actual_class = self.__class__.klass
self.assertEqual(expected_class, actual_class)
expected_namespace = expected_class._XSI_NS
actual_namespace = cybox.utils.nsparser.OBJECT_TYPES_DICT.get(t).get('namespace_prefix')
self.assertEqual(expected_namespace, actual_namespace)
self.assertEqual(expected_class._XSI_TYPE, t)
|
7aff5878747c000c5868e3a5ddd8b205d74770b0 | thinc/extra/load_nlp.py | thinc/extra/load_nlp.py | import numpy
try:
import spacy
except ImportError:
spacy = None
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
if spacy is None:
raise ImportError("Could not import spacy. Is it installed?")
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| Improve import of spaCy, to prevent cycles | Improve import of spaCy, to prevent cycles
| Python | mit | explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc | import numpy
try:
import spacy
except ImportError:
spacy = None
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
if spacy is None:
raise ImportError("Could not import spacy. Is it installed?")
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
Improve import of spaCy, to prevent cycles | import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| <commit_before>import numpy
try:
import spacy
except ImportError:
spacy = None
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
if spacy is None:
raise ImportError("Could not import spacy. Is it installed?")
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
<commit_msg>Improve import of spaCy, to prevent cycles<commit_after> | import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| import numpy
try:
import spacy
except ImportError:
spacy = None
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
if spacy is None:
raise ImportError("Could not import spacy. Is it installed?")
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
Improve import of spaCy, to prevent cyclesimport numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
| <commit_before>import numpy
try:
import spacy
except ImportError:
spacy = None
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
if spacy is None:
raise ImportError("Could not import spacy. Is it installed?")
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
<commit_msg>Improve import of spaCy, to prevent cycles<commit_after>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
|
486e2fc3f2db9afabaf0dd279be0675439304d83 | app/__init__.py | app/__init__.py | from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
| from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SECRET_KEY'] = 'supersectetsessionrequiredkeyforsecurityreasons'
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
| Add secret key for session use | Add secret key for session use
| Python | mit | jawrainey/healthchat,jawrainey/healthchat | from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
Add secret key for session use | from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SECRET_KEY'] = 'supersectetsessionrequiredkeyforsecurityreasons'
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
| <commit_before>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
<commit_msg>Add secret key for session use<commit_after> | from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SECRET_KEY'] = 'supersectetsessionrequiredkeyforsecurityreasons'
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
| from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
Add secret key for session usefrom flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SECRET_KEY'] = 'supersectetsessionrequiredkeyforsecurityreasons'
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
| <commit_before>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
<commit_msg>Add secret key for session use<commit_after>from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.socketio import SocketIO
import os
# The main application folder
_basedir = os.path.abspath(os.path.dirname('..'))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(_basedir, 'test.db')
app = Flask(__name__)
app.debug = True
app.config['SECRET_KEY'] = 'supersectetsessionrequiredkeyforsecurityreasons'
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
socketio = SocketIO(app)
db = SQLAlchemy(app)
from app import views, models
|
0cf18a615ef671e6bee44da386016d8ab1b8b012 | Generator.py | Generator.py | import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
| import random
import re
def generateWord(meaning, form, categories, settings, phonotactics,
formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
word = applyPhonotactics(word, phonotactics)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
def applyPhonotactics(word, phonotactics):
'''Takes dictionary of phonotactics rules and a word. Returns
word with all rules applied.'''
for name, rule in phonotactics.items():
print("Applying rule: " + name)
r = rule.split("->")
word = re.sub(r[0], r[1], word)
return word
| Add applyPhonotactics() and use it in generation | Add applyPhonotactics() and use it in generation
| Python | mit | kdelwat/Lexeme | import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
Add applyPhonotactics() and use it in generation | import random
import re
def generateWord(meaning, form, categories, settings, phonotactics,
formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
word = applyPhonotactics(word, phonotactics)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
def applyPhonotactics(word, phonotactics):
'''Takes dictionary of phonotactics rules and a word. Returns
word with all rules applied.'''
for name, rule in phonotactics.items():
print("Applying rule: " + name)
r = rule.split("->")
word = re.sub(r[0], r[1], word)
return word
| <commit_before>import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
<commit_msg>Add applyPhonotactics() and use it in generation<commit_after> | import random
import re
def generateWord(meaning, form, categories, settings, phonotactics,
formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
word = applyPhonotactics(word, phonotactics)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
def applyPhonotactics(word, phonotactics):
'''Takes dictionary of phonotactics rules and a word. Returns
word with all rules applied.'''
for name, rule in phonotactics.items():
print("Applying rule: " + name)
r = rule.split("->")
word = re.sub(r[0], r[1], word)
return word
| import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
Add applyPhonotactics() and use it in generationimport random
import re
def generateWord(meaning, form, categories, settings, phonotactics,
formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
word = applyPhonotactics(word, phonotactics)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
def applyPhonotactics(word, phonotactics):
'''Takes dictionary of phonotactics rules and a word. Returns
word with all rules applied.'''
for name, rule in phonotactics.items():
print("Applying rule: " + name)
r = rule.split("->")
word = re.sub(r[0], r[1], word)
return word
| <commit_before>import random
def generateWord(meaning, form, categories, settings, formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
<commit_msg>Add applyPhonotactics() and use it in generation<commit_after>import random
import re
def generateWord(meaning, form, categories, settings, phonotactics,
formrules=None):
'''Takes an English string, desired form, generation
categories, settings, and optional form-specific rules.
Returns a generated word.
'''
word = ""
minS = settings["minS"]
maxS = settings["maxS"]
defaultrule = settings["rule"]
if formrules is not None:
if form in formrules:
rule = formrules[form]
else:
rule = defaultrule
else:
rule = defaultrule
for syllable in range(random.randint(minS, maxS)):
word += generateSyllable(categories, rule)
word = applyPhonotactics(word, phonotactics)
return {'english': meaning, 'word': word, 'form': form}
def generateSyllable(categories, rule):
'''Takes a category dictionary and a rule. Returns a
generated syllable.
'''
syllable = ""
for place in rule:
if isinstance(place, str):
syllable += random.choice(categories[place])
else:
x = random.choice(place)
if x is not None:
syllable += random.choice(categories[x])
return syllable
def applyPhonotactics(word, phonotactics):
'''Takes dictionary of phonotactics rules and a word. Returns
word with all rules applied.'''
for name, rule in phonotactics.items():
print("Applying rule: " + name)
r = rule.split("->")
word = re.sub(r[0], r[1], word)
return word
|
f97b5ec83601430ae63ac6c0a6e651cc7a0cf90d | project/encode.py | project/encode.py | from msgpack import packb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(data, default=encode_btree)
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
unpacker = Unpacker(data, object_hook=decode_btree)
return(next(unpacker))
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
| from msgpack import packb, unpackb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
from checksum import add_integrity, check_integrity
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(compress(add_integrity(packb(data, default=encode_btree))))
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
data = decompress(next(Unpacker(data)))
return unpackb(check_integrity(data), object_hook=decode_btree)
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
| Add compression and integrity checks | Add compression and integrity checks
| Python | mit | Snuggert/moda | from msgpack import packb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(data, default=encode_btree)
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
unpacker = Unpacker(data, object_hook=decode_btree)
return(next(unpacker))
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
Add compression and integrity checks | from msgpack import packb, unpackb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
from checksum import add_integrity, check_integrity
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(compress(add_integrity(packb(data, default=encode_btree))))
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
data = decompress(next(Unpacker(data)))
return unpackb(check_integrity(data), object_hook=decode_btree)
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
| <commit_before>from msgpack import packb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(data, default=encode_btree)
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
unpacker = Unpacker(data, object_hook=decode_btree)
return(next(unpacker))
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
<commit_msg>Add compression and integrity checks<commit_after> | from msgpack import packb, unpackb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
from checksum import add_integrity, check_integrity
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(compress(add_integrity(packb(data, default=encode_btree))))
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
data = decompress(next(Unpacker(data)))
return unpackb(check_integrity(data), object_hook=decode_btree)
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
| from msgpack import packb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(data, default=encode_btree)
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
unpacker = Unpacker(data, object_hook=decode_btree)
return(next(unpacker))
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
Add compression and integrity checksfrom msgpack import packb, unpackb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
from checksum import add_integrity, check_integrity
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(compress(add_integrity(packb(data, default=encode_btree))))
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
data = decompress(next(Unpacker(data)))
return unpackb(check_integrity(data), object_hook=decode_btree)
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
| <commit_before>from msgpack import packb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(data, default=encode_btree)
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
unpacker = Unpacker(data, object_hook=decode_btree)
return(next(unpacker))
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
<commit_msg>Add compression and integrity checks<commit_after>from msgpack import packb, unpackb, Unpacker
from snappy import compress, decompress # noqa
from btree import Tree, Node, Leaf, LazyNode
from checksum import add_integrity, check_integrity
def encode_btree(obj):
if isinstance(obj, (Tree, Node, Leaf)):
return {'__class__': obj.__class__.__name__,
'data': obj.to_json()}
elif isinstance(obj, LazyNode):
return obj.offset
return obj
def encode(data):
return packb(compress(add_integrity(packb(data, default=encode_btree))))
def decode(data, tree):
def decode_btree(obj):
if b'__class__' in obj:
cls_name = obj[b'__class__'].decode()
data = obj[b'data']
if cls_name == 'Leaf':
obj = Leaf(tree, bucket=bucket_to_lazynodes(data, tree))
elif cls_name == 'Node':
bucket = bucket_to_lazynodes(data[b'bucket'], tree)
obj = Node(tree, bucket=bucket,
rest=LazyNode(offset=data[b'rest'], tree=tree))
else:
tree.max_size = data[b'max_size']
tree.root = LazyNode(offset=data[b'root'], tree=tree)
return tree
return obj
data = decompress(next(Unpacker(data)))
return unpackb(check_integrity(data), object_hook=decode_btree)
def bucket_to_lazynodes(bucket, tree):
return {k: LazyNode(offset=v, tree=tree) for k, v in bucket.items()}
|
7cc968f90407745b84bd2f663e5f64b9c0923605 | project/manage.py | project/manage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Use full path in case the working dir is not the same | Use full path in case the working dir is not the same
| Python | mit | hacklab-fi/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,rambo/asylum,jautero/asylum,rambo/asylum,rambo/asylum,rambo/asylum,jautero/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Use full path in case the working dir is not the same | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Use full path in case the working dir is not the same<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Use full path in case the working dir is not the same#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
if __name__ == "__main__":
if os.path.isfile('.env'):
environ.Env.read_env('.env')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Use full path in case the working dir is not the same<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import environ
ROOT_DIR = environ.Path(__file__) - 1
if __name__ == "__main__":
if os.path.isfile(str(ROOT_DIR + '.env')):
environ.Env.read_env(str(ROOT_DIR + '.env'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
fff1312d506b4268be2e2ce7c333d9f25babdd59 | fuzzyfinder/main.py | fuzzyfinder/main.py | # -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('(%s)' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
| # -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('%s' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
| Remove the unnecessary capturing group. | Remove the unnecessary capturing group.
| Python | bsd-3-clause | harrisonfeng/fuzzyfinder,amjith/fuzzyfinder,adammenges/fuzzyfinder | # -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('(%s)' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
Remove the unnecessary capturing group. | # -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('%s' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
| <commit_before># -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('(%s)' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
<commit_msg>Remove the unnecessary capturing group.<commit_after> | # -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('%s' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
| # -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('(%s)' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
Remove the unnecessary capturing group.# -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('%s' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
| <commit_before># -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('(%s)' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
<commit_msg>Remove the unnecessary capturing group.<commit_after># -*- coding: utf-8 -*-
import re
from . import export
@export
def fuzzyfinder(text, collection):
"""
Args:
text (str): A partial string which is typically entered by a user.
collection (iterable): A collection of strings which will be filtered
based on the input `text`.
Returns:
suggestions (generator): A generator object that produces a list of
suggestions narrowed down from `collections` using the `text`
input.
"""
suggestions = []
regex = '.*?'.join(map(re.escape, text))
pat = re.compile('%s' % regex)
for item in sorted(collection):
r = pat.search(item)
if r:
suggestions.append((len(r.group()), r.start(), item))
return (z for _, _, z in sorted(suggestions))
|
d33d791a5e90ab1a389d85b5e93df7f07167eb5b | tests/integration/test_home_page.py | tests/integration/test_home_page.py | from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
| from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
| Update tests to reflect title change | Update tests to reflect title change
| Python | isc | textbook/flash,textbook/flash,textbook/flash | from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
Update tests to reflect title change | from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
| <commit_before>from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
<commit_msg>Update tests to reflect title change<commit_after> | from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
| from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
Update tests to reflect title changefrom flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
| <commit_before>from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flash - Flask Dashboard'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
<commit_msg>Update tests to reflect title change<commit_after>from flask import url_for
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from tests.helpers import slow
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_accessible(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(expected_conditions.title_is(
'Flask + Dashboard = Flash'
))
assert selenium.find_element(By.CLASS_NAME, 'headline').text == 'PROJECT GNOME'
@pytest.mark.usefixtures('live_server')
@slow
def test_home_page_contains_tracker_dashboard(selenium):
go_to_home_page(selenium)
WebDriverWait(selenium, 5).until(
expected_conditions.presence_of_element_located(
(By.CLASS_NAME, 'tracker-pane')
)
)
def go_to_home_page(selenium):
selenium.get(url_for('home', _external=True))
|
c28f0313a96be4f4095f542ba2a9524c129c88f7 | luigi_td/__init__.py | luigi_td/__init__.py | from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config'
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget'
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
| from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config',
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget',
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
| Fix variable names in __all__ | Fix variable names in __all__
| Python | apache-2.0 | treasure-data/luigi-td | from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config'
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget'
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
Fix variable names in __all__ | from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config',
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget',
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
| <commit_before>from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config'
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget'
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
<commit_msg>Fix variable names in __all__<commit_after> | from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config',
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget',
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
| from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config'
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget'
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
Fix variable names in __all__from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config',
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget',
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
| <commit_before>from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config'
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget'
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
<commit_msg>Fix variable names in __all__<commit_after>from luigi_td.bulk_import import BulkImport
from luigi_td.client import ResultProxy
from luigi_td.config import Config, get_config
from luigi_td.task import DatabaseTask, TableTask, Query
from luigi_td.targets.result import ResultTarget
from luigi_td.targets.s3 import S3ResultTarget
from luigi_td.targets.tableau import TableauServerResultTarget, TableauOnlineResultTarget
from luigi_td.targets.td import DatabaseTarget, TableTarget, SchemaError
__all__ = [
# bulk_import
'BulkImport',
# client
'ResultProxy',
# config
'Config',
'get_config',
# task
'DatabaseTask',
'TableTask',
'Query',
# targets.result
'ResultTarget',
# targets.s3
'S3ResultTarget',
# targets.tableau
'TableauServerResultTarget',
'TableauOnlineResultTarget',
# targets.td
'DatabaseTarget',
'TableTarget',
'SchemaError',
]
|
24d148e33218a3f08a56a95052b0cfe8ecb0cecd | pykka/__init__.py | pykka/__init__.py | """
Pykka is a concurrency abstraction which makes actors look like regular
objects.
See http://jodal.github.com/pykka/ for more information.
"""
from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
| from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
| Remove docstring which only repeats the readme | Remove docstring which only repeats the readme
| Python | apache-2.0 | jodal/pykka,tamland/pykka,tempbottle/pykka | """
Pykka is a concurrency abstraction which makes actors look like regular
objects.
See http://jodal.github.com/pykka/ for more information.
"""
from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
Remove docstring which only repeats the readme | from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
| <commit_before>"""
Pykka is a concurrency abstraction which makes actors look like regular
objects.
See http://jodal.github.com/pykka/ for more information.
"""
from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
<commit_msg>Remove docstring which only repeats the readme<commit_after> | from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
| """
Pykka is a concurrency abstraction which makes actors look like regular
objects.
See http://jodal.github.com/pykka/ for more information.
"""
from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
Remove docstring which only repeats the readmefrom pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
| <commit_before>"""
Pykka is a concurrency abstraction which makes actors look like regular
objects.
See http://jodal.github.com/pykka/ for more information.
"""
from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
<commit_msg>Remove docstring which only repeats the readme<commit_after>from pykka.actor import Actor
from pykka.future import Future, get_all, wait_all
from pykka.proxy import ActorProxy, CallableProxy
from pykka.registry import ActorRegistry
__all__ = ['Actor', 'ActorProxy', 'ActorRegistry', 'CallableProxy', 'Future',
'get_all', 'wait_all']
VERSION = (0, 4)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if len(VERSION) > 2:
version = '%s.%s' % (version, VERSION[2])
return version
|
b74971eaf180c14fef68142bffc689b1bc7340f4 | approvaltests/Namer.py | approvaltests/Namer.py | import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return self.Directory + "\\" + self.ClassName + "." + self.MethodName
| import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return os.path.join(self.Directory, self.ClassName + "." + self.MethodName)
| Create approval file path that works on Linux and Windows. | Create approval file path that works on Linux and Windows.
| Python | apache-2.0 | approvals/ApprovalTests.Python,approvals/ApprovalTests.Python,approvals/ApprovalTests.Python,tdpreece/ApprovalTests.Python | import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return self.Directory + "\\" + self.ClassName + "." + self.MethodName
Create approval file path that works on Linux and Windows. | import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return os.path.join(self.Directory, self.ClassName + "." + self.MethodName)
| <commit_before>import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return self.Directory + "\\" + self.ClassName + "." + self.MethodName
<commit_msg>Create approval file path that works on Linux and Windows.<commit_after> | import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return os.path.join(self.Directory, self.ClassName + "." + self.MethodName)
| import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return self.Directory + "\\" + self.ClassName + "." + self.MethodName
Create approval file path that works on Linux and Windows.import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return os.path.join(self.Directory, self.ClassName + "." + self.MethodName)
| <commit_before>import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return self.Directory + "\\" + self.ClassName + "." + self.MethodName
<commit_msg>Create approval file path that works on Linux and Windows.<commit_after>import inspect
import os
class Namer(object):
ClassName = ''
MethodName = ''
Directory = ''
def setForStack(self, caller):
stackFrame = caller[self.frame]
self.MethodName = stackFrame[3]
self.ClassName = stackFrame[0].f_globals["__name__"]
self.Directory = os.path.dirname(stackFrame[1])
def __init__(self, frame=1):
self.frame = frame
self.setForStack(inspect.stack(1))
def getClassName(self):
return self.ClassName
def getMethodName(self):
return self.MethodName
def getDirectory(self):
return self.Directory
def get_basename(self):
return os.path.join(self.Directory, self.ClassName + "." + self.MethodName)
|
832256925a08c0bbfe3f507ef613d51e11ac8103 | feature_extraction/extraction.py | feature_extraction/extraction.py | import numpy as np
def extract_features(image, measurements):
    """Run every measurement against *image* and combine the results.

    Each object in *measurements* must expose a ``compute`` method that
    returns a feature vector for the given Numpy image; the vectors are
    concatenated horizontally into one combined feature vector.
    """
    # TODO(liam): parallelize multiple measurements on an image by using Celery
    feature_vectors = []
    for measurement in measurements:
        feature_vectors.append(measurement.compute(image))
    return np.hstack(feature_vectors)
| import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
def normalize_features(X):
    """Return a normalized copy of the feature matrix *X*.

    Each column (feature) is mean-centered and scaled to unit L2 norm
    over the dataset, then each row (record) is scaled to unit L2 norm.

    Fixes over the original: the input is copied and promoted to float,
    so the caller's array is no longer mutated in place (and integer
    arrays no longer raise TypeError on in-place true division), and
    zero-norm columns/rows are left unscaled instead of producing NaNs
    from division by zero.
    """
    X = np.array(X, dtype=float)
    # Recenter features and normalize each column over the dataset.
    X -= np.mean(X, axis=0)
    column_norms = np.linalg.norm(X, axis=0)
    column_norms[column_norms == 0] = 1.0
    X /= column_norms
    # Normalize each record (row).
    row_norms = np.linalg.norm(X, axis=1)
    row_norms[row_norms == 0] = 1.0
    X /= row_norms[:, np.newaxis]
    return X
| Add a function to normalize the feature vectors | Add a function to normalize the feature vectors
| Python | apache-2.0 | widoptimization-willett/feature-extraction | import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
Add a function to normalize the feature vectors | import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
def normalize_features(X):
# recenter features and normalize over the dataset
X -= np.mean(X, axis=0)
X /= np.linalg.norm(X, axis=0)
# normalize for each record
X /= np.vstack(np.linalg.norm(X, axis=1))
return X
| <commit_before>import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
<commit_msg>Add a function to normalize the feature vectors<commit_after> | import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
def normalize_features(X):
# recenter features and normalize over the dataset
X -= np.mean(X, axis=0)
X /= np.linalg.norm(X, axis=0)
# normalize for each record
X /= np.vstack(np.linalg.norm(X, axis=1))
return X
| import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
Add a function to normalize the feature vectorsimport numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
def normalize_features(X):
# recenter features and normalize over the dataset
X -= np.mean(X, axis=0)
X /= np.linalg.norm(X, axis=0)
# normalize for each record
X /= np.vstack(np.linalg.norm(X, axis=1))
return X
| <commit_before>import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
<commit_msg>Add a function to normalize the feature vectors<commit_after>import numpy as np
def extract_features(image, measurements):
"""
Given an image as a Numpy array and a set of measurement objects
implementing a compute method returning a feature vector, return a combined
feature vector.
"""
# TODO(liam): parallelize multiple measurements on an image by using Celery
return np.hstack([m.compute(image) for m in measurements])
def normalize_features(X):
# recenter features and normalize over the dataset
X -= np.mean(X, axis=0)
X /= np.linalg.norm(X, axis=0)
# normalize for each record
X /= np.vstack(np.linalg.norm(X, axis=1))
return X
|
9ea35a99c30f2ec7ed3946e71a286e689d2a50a3 | api/tests/test_signup.py | api/tests/test_signup.py | from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories, serializers
from api.models import User
from api.serializers import UserSerializer
class SignupTest(TestCase):
    """Integration test for the signup API view."""
    PASSWORD = 'test'
    def setUp(self):
        """Create a request factory and an unsaved user fixture."""
        self.factory = APIRequestFactory()
        # build() returns an unsaved user, so its username/email remain
        # free for the signup endpoint to claim.
        self.user = factories.UserFactory.build()
    def test_signup_works(self):
        """A POST with full, valid data creates the user and returns 201."""
        serializer = UserSerializer(self.user)
        request_data = serializer.data
        # Attach the plaintext credentials the signup endpoint expects.
        request_data['password'] = self.PASSWORD
        request_data['password_confirmation'] = self.PASSWORD
        request = self.factory.post('/api/signup/', request_data, format='json')
        response = signup(request)
        new_user = response.data
        self.assertEqual(response.status_code, 201)
        # NOTE(review): response.data is normally a dict; attribute access
        # (new_user.username) looks suspect -- confirm the signup view
        # really returns an object here.
        self.assertEqual(new_user.username, self.user.username)
        self.assertEqual(new_user.email, self.user.email)
        self.assertEqual(new_user.first_name, self.user.first_name)
        self.assertEqual(new_user.last_name, self.user.last_name)
| from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories
from api.serializers import UserSerializer
class SignupTest(TestCase):
    """Integration tests for the signup API view."""

    PASSWORD = 'test'
    REQUIRED_FIELD_ERROR = 'This field is required.'

    def setUp(self):
        """Create a request factory and an unsaved user fixture."""
        self.factory = APIRequestFactory()
        # build() returns an unsaved user, so its username/email remain
        # free for the signup endpoint to claim.
        self.user = factories.UserFactory.build()

    def test_signup_works(self):
        """A POST with full, valid data creates the user and returns 201."""
        serializer = UserSerializer(self.user)
        request_data = serializer.data
        # Attach the plaintext credentials the signup endpoint expects.
        request_data['password'] = self.PASSWORD
        request_data['password_confirmation'] = self.PASSWORD
        request = self.factory.post('/api/signup/', request_data, format='json')
        response = signup(request)
        new_user = response.data
        self.assertEqual(response.status_code, 201)
        # NOTE(review): response.data is normally a dict; attribute access
        # (new_user.username) looks suspect -- confirm the signup view
        # really returns an object here.
        self.assertEqual(new_user.username, self.user.username)
        self.assertEqual(new_user.email, self.user.email)
        self.assertEqual(new_user.first_name, self.user.first_name)
        self.assertEqual(new_user.last_name, self.user.last_name)

    def test_signup_returns_errors_on_missing_required_fields(self):
        """An empty POST is rejected with 400 and per-field 'required' errors."""
        request = self.factory.post('/api/signup/', {}, format='json')
        response = signup(request)
        data = response.data
        # Removed the leftover debug print(data); assertIn gives a clearer
        # failure message than assertTrue(x in y).
        self.assertEqual(response.status_code, 400)
        for field in ('username', 'password', 'email', 'first_name', 'last_name'):
            self.assertIn(self.REQUIRED_FIELD_ERROR, data[field])
| Add test for errors, but User fields need to become required first | Add test for errors, but User fields need to become required first
| Python | mit | frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq | from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories, serializers
from api.models import User
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
Add test for errors, but User fields need to become required first | from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
REQUIRED_FIELD_ERROR = 'This field is required.'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
def test_signup_returns_errors_on_missing_required_fields(self):
request = self.factory.post('/api/signup/', {}, format='json')
response = signup(request)
data = response.data
print(data)
self.assertEqual(response.status_code, 400)
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['username'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['password'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['email'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['first_name'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['last_name'])
| <commit_before>from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories, serializers
from api.models import User
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
<commit_msg>Add test for errors, but User fields need to become required first<commit_after> | from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
REQUIRED_FIELD_ERROR = 'This field is required.'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
def test_signup_returns_errors_on_missing_required_fields(self):
request = self.factory.post('/api/signup/', {}, format='json')
response = signup(request)
data = response.data
print(data)
self.assertEqual(response.status_code, 400)
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['username'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['password'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['email'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['first_name'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['last_name'])
| from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories, serializers
from api.models import User
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
Add test for errors, but User fields need to become required firstfrom django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
REQUIRED_FIELD_ERROR = 'This field is required.'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
def test_signup_returns_errors_on_missing_required_fields(self):
request = self.factory.post('/api/signup/', {}, format='json')
response = signup(request)
data = response.data
print(data)
self.assertEqual(response.status_code, 400)
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['username'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['password'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['email'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['first_name'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['last_name'])
| <commit_before>from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories, serializers
from api.models import User
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
<commit_msg>Add test for errors, but User fields need to become required first<commit_after>from django.test import TestCase
from api.views.signup import signup
from rest_framework.test import APIRequestFactory
from api import factories
from api.serializers import UserSerializer
class SignupTest(TestCase):
PASSWORD = 'test'
REQUIRED_FIELD_ERROR = 'This field is required.'
def setUp(self):
self.factory = APIRequestFactory()
self.user = factories.UserFactory.build()
def test_signup_works(self):
serializer = UserSerializer(self.user)
request_data = serializer.data
request_data['password'] = self.PASSWORD
request_data['password_confirmation'] = self.PASSWORD
request = self.factory.post('/api/signup/', request_data, format='json')
response = signup(request)
new_user = response.data
self.assertEqual(response.status_code, 201)
self.assertEqual(new_user.username, self.user.username)
self.assertEqual(new_user.email, self.user.email)
self.assertEqual(new_user.first_name, self.user.first_name)
self.assertEqual(new_user.last_name, self.user.last_name)
def test_signup_returns_errors_on_missing_required_fields(self):
request = self.factory.post('/api/signup/', {}, format='json')
response = signup(request)
data = response.data
print(data)
self.assertEqual(response.status_code, 400)
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['username'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['password'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['email'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['first_name'])
self.assertTrue(self.REQUIRED_FIELD_ERROR in data['last_name'])
|
de9e8ab1a91e2a0e69971f9c23377f97e717b048 | app/__init__.py | app/__init__.py | from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
| import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
| Add additional application for gunicorn. | Add additional application for gunicorn.
| Python | mpl-2.0 | mrf345/FQM,mrf345/FQM,mrf345/FQM,mrf345/FQM | from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
Add additional application for gunicorn. | import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
| <commit_before>from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
<commit_msg>Add additional application for gunicorn.<commit_after> | import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
| from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
Add additional application for gunicorn.import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
| <commit_before>from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
<commit_msg>Add additional application for gunicorn.<commit_after>import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
|
4bd4c209862db759102c869da324db3e74347880 | gold_digger/settings/__init__.py | gold_digger/settings/__init__.py | # -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
# Active configuration profile; defaults to local development settings.
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
# Layer the profile-specific settings over the star-imported defaults.
if PROFILE == "master":
    from ._settings_master import *
elif PROFILE == "local":
    try:
        from ._settings_local import *
    except ImportError:
        # _settings_local.py is presumably untracked (see README); tell the
        # developer how to create it.
        raise ImproperlyConfigured(
            f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
        )
else:
    # BUG FIX: only the profiles handled above are valid; the original
    # message wrongly advertised an unsupported "staging" profile.
    raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, local.")
| # -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, local.")
| Remove staging from supported profiles | Remove staging from supported profiles
| Python | apache-2.0 | business-factory/gold-digger | # -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
Remove staging from supported profiles | # -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, local.")
| <commit_before># -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
<commit_msg>Remove staging from supported profiles<commit_after> | # -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, local.")
| # -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
Remove staging from supported profiles# -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, local.")
| <commit_before># -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, staging, local.")
<commit_msg>Remove staging from supported profiles<commit_after># -*- coding: utf-8 -*-
from os import environ, path
from ._settings_default import *
from ..exceptions import ImproperlyConfigured
PROFILE = environ.get("GOLD_DIGGER_PROFILE", "local")
if PROFILE == "master":
from ._settings_master import *
elif PROFILE == "local":
try:
from ._settings_local import *
except ImportError:
raise ImproperlyConfigured(
f"Local configuration not found. Create file _settings_local.py in {path.abspath(path.join(__file__, path.pardir))} directory according to README."
)
else:
raise ValueError(f"Unsupported settings profile. Got: {PROFILE}. Use one of: master, local.")
|
60f666a7d3aac09b5fa8e3df29d0ff08b67eac3c | tools/gyp/find_mac_gcc_version.py | tools/gyp/find_mac_gcc_version.py | #!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
| #!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
| Revert "Revert "Use clang on mac if XCode >= 4.5"" | Revert "Revert "Use clang on mac if XCode >= 4.5""
The V8 dependency has been removed, so we should be able to enable clang on mac
again.
R=ricow@google.com
Review URL: https://codereview.chromium.org//14751012
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@22304 260f80e4-7a28-3924-810f-c04153c831b5
| Python | bsd-3-clause | dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk | #!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
Revert "Revert "Use clang on mac if XCode >= 4.5""
The V8 dependency has been removed, so we should be able to enable clang on mac
again.
R=ricow@google.com
Review URL: https://codereview.chromium.org//14751012
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@22304 260f80e4-7a28-3924-810f-c04153c831b5 | #!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
| <commit_before>#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
<commit_msg>Revert "Revert "Use clang on mac if XCode >= 4.5""
The V8 dependency has been removed, so we should be able to enable clang on mac
again.
R=ricow@google.com
Review URL: https://codereview.chromium.org//14751012
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@22304 260f80e4-7a28-3924-810f-c04153c831b5<commit_after> | #!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
| #!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
Revert "Revert "Use clang on mac if XCode >= 4.5""
The V8 dependency has been removed, so we should be able to enable clang on mac
again.
R=ricow@google.com
Review URL: https://codereview.chromium.org//14751012
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@22304 260f80e4-7a28-3924-810f-c04153c831b5#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
| <commit_before>#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major >= 4:
return 'com.apple.compilers.llvmgcc42'
elif major == 3 and minor >= 1:
return '4.2'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
<commit_msg>Revert "Revert "Use clang on mac if XCode >= 4.5""
The V8 dependency has been removed, so we should be able to enable clang on mac
again.
R=ricow@google.com
Review URL: https://codereview.chromium.org//14751012
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@22304 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>#!/usr/bin/env python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import re
import subprocess
import sys
def main():
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = job.communicate()
if job.returncode != 0:
print >>sys.stderr, stdout
print >>sys.stderr, stderr
raise Exception('Error %d running xcodebuild!' % job.returncode)
matches = re.findall('^Xcode (\d+)\.(\d+)(\.(\d+))?$', stdout, re.MULTILINE)
if len(matches) > 0:
major = int(matches[0][0])
minor = int(matches[0][1])
if major == 3 and minor >= 1:
return '4.2'
elif major == 4 and minor < 5:
return 'com.apple.compilers.llvmgcc42'
elif major == 4 and minor >= 5:
# XCode seems to select the specific clang version automatically
return 'com.apple.compilers.llvm.clang.1_0'
else:
raise Exception('Unknown XCode Version "%s"' % version_match)
else:
raise Exception('Could not parse output of xcodebuild "%s"' % stdout)
if __name__ == '__main__':
if sys.platform != 'darwin':
raise Exception("This script only runs on Mac")
print main()
|
2206b4b96805e35a612f471fb8f843b35fe45021 | code/dstruct/Word.py | code/dstruct/Word.py | #! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
insent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner, _lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
#self.lemma = self.lemma.replace('"', "''")
#self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
| #! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
in_sent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner,
_lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will
# crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
# self.lemma = self.lemma.replace('"', "''")
# self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
| Fix name of an attribute and PEP8-ify | Fix name of an attribute and PEP8-ify
| Python | apache-2.0 | HazyResearch/dd-genomics,HazyResearch/dd-genomics,HazyResearch/dd-genomics,amwenger/dd-genomics,HazyResearch/dd-genomics,rionda/dd-genomics,rionda/dd-genomics,amwenger/dd-genomics,amwenger/dd-genomics,HazyResearch/dd-genomics | #! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
insent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner, _lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
#self.lemma = self.lemma.replace('"', "''")
#self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
Fix name of an attribute and PEP8-ify | #! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
in_sent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner,
_lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will
# crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
# self.lemma = self.lemma.replace('"', "''")
# self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
| <commit_before>#! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
insent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner, _lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
#self.lemma = self.lemma.replace('"', "''")
#self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
<commit_msg>Fix name of an attribute and PEP8-ify<commit_after> | #! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
in_sent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner,
_lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will
# crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
# self.lemma = self.lemma.replace('"', "''")
# self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
| #! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
insent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner, _lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
#self.lemma = self.lemma.replace('"', "''")
#self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
Fix name of an attribute and PEP8-ify#! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
in_sent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner,
_lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will
# crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
# self.lemma = self.lemma.replace('"', "''")
# self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
| <commit_before>#! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
insent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner, _lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
#self.lemma = self.lemma.replace('"', "''")
#self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
<commit_msg>Fix name of an attribute and PEP8-ify<commit_after>#! /usr/bin/env python3
""" A Word class
Originally obtained from the 'pharm' repository, but modified.
"""
class Word(object):
doc_id = None
sent_id = None
in_sent_idx = None
word = None
pos = None
ner = None
lemma = None
dep_path = None
dep_parent = None
sent_id = None
box = None
def __init__(self, _doc_id, _sent_id, _in_sent_idx, _word, _pos, _ner,
_lemma, _dep_path, _dep_parent, _box):
self.doc_id = _doc_id
self.sent_id = _sent_id
self.in_sent_idx = _in_sent_idx
self.word = _word
self.pos = _pos
self.ner = _ner
self.dep_parent = _dep_parent
self.dep_path = _dep_path
self.box = _box
self.lemma = _lemma
# If do not do the following, outputting an Array in the language will
# crash
# XXX (Matteo) This was in the pharm code, not sure what it means
# I actually don't think this should go here.
# self.lemma = self.lemma.replace('"', "''")
# self.lemma = self.lemma.replace('\\', "_")
def __repr__(self):
return self.word
# Return the NER tag if different than 'O', otherwise return the lemma
def get_feature(self):
if self.ner == 'O':
return self.lemma
else:
return self.ner
|
85a047ce00f86727b2d260c609049f82927825ff | byteaccess/__init__.py | byteaccess/__init__.py | from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
| from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
| Add version placeholder for now | Add version placeholder for now
| Python | bsd-2-clause | ChadSki/byteaccess,ChadSki/halolib | from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
Add version placeholder for now | from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
| <commit_before>from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
<commit_msg>Add version placeholder for now<commit_after> | from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
| from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
Add version placeholder for nowfrom byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
| <commit_before>from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
<commit_msg>Add version placeholder for now<commit_after>from byteaccess.byteaccess import ByteAccess, access_over_file
from byteaccess.winmemaccess import access_over_process
__version__ = 'TODO: Figure out Python 3 version conventions'
|
2d1c8a62295ed5150f31e11cdbbf98d4d74498d8 | bin/cgroup-limits.py | bin/cgroup-limits.py | #!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
for item in env_vars.items():
print("=".join(item))
| #!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
| Set MAX_MEMORY_LIMIT_IN_BYTES to number returned by cgroups where there is no limit | Set MAX_MEMORY_LIMIT_IN_BYTES to number returned by cgroups where there is no limit
| Python | apache-2.0 | openshift/sti-base,hhorak/sti-base,soltysh/sti-base,openshift/sti-base,bparees/sti-base,sclorg/s2i-base-container,mfojtik/sti-base,mfojtik/sti-base,bparees/sti-base | #!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
for item in env_vars.items():
print("=".join(item))
Set MAX_MEMORY_LIMIT_IN_BYTES to number returned by cgroups where there is no limit | #!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
| <commit_before>#!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
for item in env_vars.items():
print("=".join(item))
<commit_msg>Set MAX_MEMORY_LIMIT_IN_BYTES to number returned by cgroups where there is no limit<commit_after> | #!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
| #!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
for item in env_vars.items():
print("=".join(item))
Set MAX_MEMORY_LIMIT_IN_BYTES to number returned by cgroups where there is no limit#!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
| <commit_before>#!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
for item in env_vars.items():
print("=".join(item))
<commit_msg>Set MAX_MEMORY_LIMIT_IN_BYTES to number returned by cgroups where there is no limit<commit_after>#!/usr/bin/python
env_vars = {}
def read_file(path):
try:
with open(path, 'r') as f:
return f.read().strip()
except IOError:
return None
def get_memory_limit():
limit = read_file('/sys/fs/cgroup/memory/memory.limit_in_bytes')
if limit:
env_vars['MEMORY_LIMIT_IN_BYTES'] = limit
def get_number_of_cores():
core_count = 0
line = read_file('/sys/fs/cgroup/cpuset/cpuset.cpus')
if line is None:
return
for group in line.split(','):
core_ids = list(map(int, group.split('-')))
if len(core_ids) == 2:
core_count += core_ids[1] - core_ids[0] + 1
else:
core_count += 1
env_vars['NUMBER_OF_CORES'] = str(core_count)
get_memory_limit()
get_number_of_cores()
print("MAX_MEMORY_LIMIT_IN_BYTES=9223372036854775807")
for item in env_vars.items():
print("=".join(item))
|
911c79f8f76187601ac2df835541d6d76f172b3b | decorators.py | decorators.py | from django.db.models.signals import pre_save
from django.dispatch import receiver
def cleans_field(field_ref):
    """Decorator that registers a field-cleaning method on the pre_save signal.

    When the referenced model is about to be saved, the decorated method is
    looked up on the instance, called with the field's current value, and its
    return value is stored back on the field.

    Args:
        field_ref (str): a label for the model field to clean, following the
            convention `app_name.ModelName.field_name`

    Raises:
        ValueError: if ``field_ref`` does not contain exactly three
            dot-separated parts.
    """
    app_name, model_name, field_name = field_ref.split('.')
    model_label = '.'.join([app_name, model_name])
    # Unique sentinel so that a field whose value is genuinely None can be
    # distinguished from an attribute that does not exist on the instance.
    missing = object()

    def _clean_wrapper(cleaner_function):
        # Register a pre-save signal handler that calls the cleaner_function
        # on the model instance, and assigns the result to the instance's field
        @receiver(pre_save, sender=model_label, weak=False)
        def signal_handler(sender, instance, **kwargs):
            """Run the cleaner_function on instance's field"""
            field_value = getattr(instance, field_name, missing)
            if field_value is missing:
                # TODO: raise warning:
                # method decorated to clean field that doesn't exist
                pass
            # __name__ (rather than the Python-2-only func_name attribute)
            # exists on function objects in both Python 2 and Python 3.
            field_cleaner = getattr(instance, cleaner_function.__name__)
            setattr(instance, field_name, field_cleaner(field_value))

        # To ensure the wrapped method can still be invoked, define an
        # additional function that executes the method with the given arguments
        # and returns the result.
        def _run_cleaner(*args, **kwargs):
            return cleaner_function(*args, **kwargs)
        return _run_cleaner
    return _clean_wrapper
| from django.db.models.signals import pre_save
from django.dispatch import receiver
class NoValue(object):
"""Empty class for disambiguating calls to getattr"""
pass
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, NoValue)
if field_value == NoValue:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
| Create NoValue class to disambiguate getattr calls | Create NoValue class to disambiguate getattr calls
| Python | mit | lamarmeigs/django-clean-fields | from django.db.models.signals import pre_save
from django.dispatch import receiver
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, None)
if field_value is None:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
Create NoValue class to disambiguate getattr calls | from django.db.models.signals import pre_save
from django.dispatch import receiver
class NoValue(object):
"""Empty class for disambiguating calls to getattr"""
pass
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, NoValue)
if field_value == NoValue:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
| <commit_before>from django.db.models.signals import pre_save
from django.dispatch import receiver
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, None)
if field_value is None:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
<commit_msg>Create NoValue class to disambiguate getattr calls<commit_after> | from django.db.models.signals import pre_save
from django.dispatch import receiver
class NoValue(object):
"""Empty class for disambiguating calls to getattr"""
pass
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, NoValue)
if field_value == NoValue:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
| from django.db.models.signals import pre_save
from django.dispatch import receiver
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, None)
if field_value is None:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
Create NoValue class to disambiguate getattr callsfrom django.db.models.signals import pre_save
from django.dispatch import receiver
class NoValue(object):
"""Empty class for disambiguating calls to getattr"""
pass
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, NoValue)
if field_value == NoValue:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
| <commit_before>from django.db.models.signals import pre_save
from django.dispatch import receiver
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, None)
if field_value is None:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
<commit_msg>Create NoValue class to disambiguate getattr calls<commit_after>from django.db.models.signals import pre_save
from django.dispatch import receiver
class NoValue(object):
"""Empty class for disambiguating calls to getattr"""
pass
def cleans_field(field_ref):
"""Decorator to registers a field cleaning methods on the pre_save signal.
Args:
field_ref (str): a label for the model field to clean, following the
convention `app_name.ModelName.field_name`
"""
app_name, model_name, field_name = field_ref.split('.')
model_label = '.'.join([app_name, model_name])
def _clean_wrapper(cleaner_function):
# Register a pre-save signal handler that calls the cleaner_function
# on model instance, and assigns the result to the instance's field
@receiver(pre_save, sender=model_label, weak=False)
def signal_handler(sender, instance, **kwargs):
"""Run the cleaner_function on instance's field"""
field_value = getattr(instance, field_name, NoValue)
if field_value == NoValue:
# TODO: raise warning:
# method decorated to clean field that doesn't exist
pass
field_cleaner = getattr(instance, cleaner_function.func_name)
setattr(instance, field_name, field_cleaner(field_value))
# To ensure the wrapped method can still be invoked, define an
# additional function that executes the method with the given arguments
# and returns the result.
def _run_cleaner(*args, **kwargs):
return cleaner_function(*args, **kwargs)
return _run_cleaner
return _clean_wrapper
|
25dfc009b380b2a63619651dbcba2c7d7ade929c | deep_parse.py | deep_parse.py | #!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
pass
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
| #!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
def __str__(self):
return 'DeepParseObject: %s' % self.__dict__
def __repr__(self):
return 'DeepParseObject: %r' % self.__dict__
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
| Add __repr__ and __str__ methods to dummy object. | Add __repr__ and __str__ methods to dummy object.
| Python | mit | bradojevic/deep-parse | #!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
pass
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
Add __repr__ and __str__ methods to dummy object. | #!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
def __str__(self):
return 'DeepParseObject: %s' % self.__dict__
def __repr__(self):
return 'DeepParseObject: %r' % self.__dict__
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
| <commit_before>#!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
pass
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
<commit_msg>Add __repr__ and __str__ methods to dummy object.<commit_after> | #!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
def __str__(self):
return 'DeepParseObject: %s' % self.__dict__
def __repr__(self):
return 'DeepParseObject: %r' % self.__dict__
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
| #!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
pass
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
Add __repr__ and __str__ methods to dummy object.#!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
def __str__(self):
return 'DeepParseObject: %s' % self.__dict__
def __repr__(self):
return 'DeepParseObject: %r' % self.__dict__
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
| <commit_before>#!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
pass
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
<commit_msg>Add __repr__ and __str__ methods to dummy object.<commit_after>#!/usr/bin/env python
"""Simple library for parsing deeply nested structure (dict, json)
into regular object. You can specify fields to extract, and argument
names in created object.
Example
content = {
'name': 'Bob',
'details': {
'email': 'bob@email.com',
}
}
fields = (
('name', ),
('details__email', 'details_email')
)
item = deep_parse_dict(content, fields)
assert item.name == 'Bob'
assert item.details_email == 'bob@email.com'
"""
class DeepParseObject(object):
"""Simple dummy object to hold content."""
def __str__(self):
return 'DeepParseObject: %s' % self.__dict__
def __repr__(self):
return 'DeepParseObject: %r' % self.__dict__
def deep_parse_dict(content, fields, exc_class=Exception, separator='__'):
"""Extracting fields specified in ``fields`` from ``content``."""
deep_parse = DeepParseObject()
for field in fields:
try:
lookup_name, store_name = field[0], field[0]
if len(field) > 1:
lookup_name, store_name = field
parts = lookup_name.split(separator)
value = content
for part in parts:
value = value[part]
setattr(deep_parse, store_name, value)
except Exception as original_exc:
exc = exc_class('Error parsing field %r' % field)
exc.error_field = field
exc.original_exc = original_exc
raise exc
return deep_parse
|
905b7849d319e5147653754ceaead333873bd401 | packager/rpm/test/test_build.py | packager/rpm/test/test_build.py | #! /usr/bin/python
from build_rpm import BuildModelRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
    """Constructing without a model name must fail."""
    BuildModelRPM(None)

@raises(TypeError)
def test_fail_with_one_parameter():
    """A version argument is required alongside the model name."""
    BuildModelRPM("hydrotrend")

def test_hydrotrend_version_none():
    BuildModelRPM("hydrotrend", None)

def test_hydrotrend_version_head():
    BuildModelRPM("hydrotrend", "head")

#def test_hydrotrend_tagged_version():
#    BuildModelRPM("hydrotrend", "3.0.2")

def test_cem_version_head():
    BuildModelRPM("cem", "head")

#def test_cem_tagged_version():
#    BuildModelRPM("cem", "0.2")

def test_child_version_head():
    BuildModelRPM("child", "head")

# Renamed from a duplicate `test_child_version_head`: the second definition
# silently shadowed the first, so the child build was never actually tested.
def test_sedflux_version_head():
    BuildModelRPM("sedflux", "head")
| #! /usr/bin/python
from packager.rpm.build import BuildRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildRPM("cem", "head")
#def test_cem_tagged_version():
# BuildRPM("cem", "0.2")
def test_child_version_head():
BuildRPM("child", "head")
def test_child_version_head():
BuildRPM("sedflux", "head")
| Update unit tests for packager.rpm.build.py | Update unit tests for packager.rpm.build.py
| Python | mit | csdms/packagebuilder | #! /usr/bin/python
from build_rpm import BuildModelRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildModelRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildModelRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildModelRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildModelRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildModelRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildModelRPM("cem", "head")
#def test_cem_tagged_version():
# BuildModelRPM("cem", "0.2")
def test_child_version_head():
BuildModelRPM("child", "head")
def test_child_version_head():
BuildModelRPM("sedflux", "head")
Update unit tests for packager.rpm.build.py | #! /usr/bin/python
from packager.rpm.build import BuildRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildRPM("cem", "head")
#def test_cem_tagged_version():
# BuildRPM("cem", "0.2")
def test_child_version_head():
BuildRPM("child", "head")
def test_child_version_head():
BuildRPM("sedflux", "head")
| <commit_before>#! /usr/bin/python
from build_rpm import BuildModelRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildModelRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildModelRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildModelRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildModelRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildModelRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildModelRPM("cem", "head")
#def test_cem_tagged_version():
# BuildModelRPM("cem", "0.2")
def test_child_version_head():
BuildModelRPM("child", "head")
def test_child_version_head():
BuildModelRPM("sedflux", "head")
<commit_msg>Update unit tests for packager.rpm.build.py<commit_after> | #! /usr/bin/python
from packager.rpm.build import BuildRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildRPM("cem", "head")
#def test_cem_tagged_version():
# BuildRPM("cem", "0.2")
def test_child_version_head():
BuildRPM("child", "head")
def test_child_version_head():
BuildRPM("sedflux", "head")
| #! /usr/bin/python
from build_rpm import BuildModelRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildModelRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildModelRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildModelRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildModelRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildModelRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildModelRPM("cem", "head")
#def test_cem_tagged_version():
# BuildModelRPM("cem", "0.2")
def test_child_version_head():
BuildModelRPM("child", "head")
def test_child_version_head():
BuildModelRPM("sedflux", "head")
Update unit tests for packager.rpm.build.py#! /usr/bin/python
from packager.rpm.build import BuildRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildRPM("cem", "head")
#def test_cem_tagged_version():
# BuildRPM("cem", "0.2")
def test_child_version_head():
BuildRPM("child", "head")
def test_child_version_head():
BuildRPM("sedflux", "head")
| <commit_before>#! /usr/bin/python
from build_rpm import BuildModelRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildModelRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildModelRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildModelRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildModelRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildModelRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildModelRPM("cem", "head")
#def test_cem_tagged_version():
# BuildModelRPM("cem", "0.2")
def test_child_version_head():
BuildModelRPM("child", "head")
def test_child_version_head():
BuildModelRPM("sedflux", "head")
<commit_msg>Update unit tests for packager.rpm.build.py<commit_after>#! /usr/bin/python
from packager.rpm.build import BuildRPM
from nose.tools import *
@raises(TypeError)
def test_fail_with_no_parameters():
BuildRPM(None)
@raises(TypeError)
def test_fail_with_one_parameter():
BuildRPM("hydrotrend")
def test_hydrotrend_version_none():
BuildRPM("hydrotrend", None)
def test_hydrotrend_version_head():
BuildRPM("hydrotrend", "head")
#def test_hydrotrend_tagged_version():
# BuildRPM("hydrotrend", "3.0.2")
def test_cem_version_head():
BuildRPM("cem", "head")
#def test_cem_tagged_version():
# BuildRPM("cem", "0.2")
def test_child_version_head():
BuildRPM("child", "head")
def test_child_version_head():
BuildRPM("sedflux", "head")
|
cbcd4ebcc01646382595ce9ee10d278120ce00ce | pets/common/views.py | pets/common/views.py | from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
pets = models.Pet.objects.select_related('city').order_by('-id')[:6]
return render(request, 'common/home.html', {'pets': pets})
| from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
return render(request, 'common/home.html')
| Remove useless code from the home view | Remove useless code from the home view
| Python | mit | dirtycoder/pets,dirtycoder/pets,dirtycoder/pets | from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
pets = models.Pet.objects.select_related('city').order_by('-id')[:6]
return render(request, 'common/home.html', {'pets': pets})
Remove useless code from the home view | from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
return render(request, 'common/home.html')
| <commit_before>from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
pets = models.Pet.objects.select_related('city').order_by('-id')[:6]
return render(request, 'common/home.html', {'pets': pets})
<commit_msg>Remove useless code from the home view<commit_after> | from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
return render(request, 'common/home.html')
| from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
pets = models.Pet.objects.select_related('city').order_by('-id')[:6]
return render(request, 'common/home.html', {'pets': pets})
Remove useless code from the home viewfrom django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
return render(request, 'common/home.html')
| <commit_before>from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
context['kind_lost'] = get_lost_kinds()
context['kind_adoption'] = get_adoption_kinds()
return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
template_name = 'staticpages/associacoes.html'
def not_found(request):
return render(request, 'staticpages/404.html')
def home(request):
pets = models.Pet.objects.select_related('city').order_by('-id')[:6]
return render(request, 'common/home.html', {'pets': pets})
<commit_msg>Remove useless code from the home view<commit_after>from django.db.models import Count
from django.shortcuts import render
from django.views.generic import TemplateView
from django.views.generic.base import ContextMixin
from meupet import models
def get_adoption_kinds():
    """Return pet kinds that have pets for adoption or already adopted."""
    return get_kind_list([models.Pet.FOR_ADOPTION, models.Pet.ADOPTED])
def get_lost_kinds():
    """Return pet kinds that have pets currently missing or found."""
    return get_kind_list([models.Pet.MISSING, models.Pet.FOUND])
def get_kind_list(status):
    """Return a Kind queryset restricted to pets whose status is in *status*.

    Each Kind is annotated with ``num_pets`` (count of matching pets) and the
    queryset is ordered by the kind name.
    """
    return models.Kind.objects.filter(pet__status__in=status).annotate(num_pets=Count('pet')).order_by('kind')
class MeuPetEspecieMixin(ContextMixin):
    """Context mixin that injects the lost/adoption kind lists.

    Used by the static page views so their templates can render the
    species navigation menus.
    """
    def get_context_data(self, **kwargs):
        context = super(MeuPetEspecieMixin, self).get_context_data(**kwargs)
        context['kind_lost'] = get_lost_kinds()
        context['kind_adoption'] = get_adoption_kinds()
        return context
class AboutPageView(MeuPetEspecieMixin, TemplateView):
    # Static "about" page.
    template_name = 'staticpages/about.html'
class AssociacoesView(MeuPetEspecieMixin, TemplateView):
    # Static page listing partner associations.
    template_name = 'staticpages/associacoes.html'
def not_found(request):
    """Render the custom 404 page."""
    return render(request, 'staticpages/404.html')
def home(request):
    """Render the site home page (no extra context needed)."""
    return render(request, 'common/home.html')
|
a8c824741ac0c4bd9a12c2fa62050b2608e7ad22 | kqml/kqml_string.py | kqml/kqml_string.py | from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(object):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
| from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(KQMLObject):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
| Make KQMLString subclass of KQMLObject. | Make KQMLString subclass of KQMLObject.
| Python | bsd-2-clause | bgyori/pykqml | from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(object):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
Make KQMLString subclass of KQMLObject. | from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(KQMLObject):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
| <commit_before>from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(object):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
<commit_msg>Make KQMLString subclass of KQMLObject.<commit_after> | from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(KQMLObject):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
| from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(object):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
Make KQMLString subclass of KQMLObject.from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(KQMLObject):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
| <commit_before>from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(object):
def __init__(self, data=None):
if data is None:
self.data = ''
else:
self.data = safe_decode(data)
def __len__(self):
return len(self.data)
def char_at(self, n):
return self.data[n]
def equals(self, obj):
if not isinstance(obj, KQMLString):
return False
else:
return obj.data == self.data
def write(self, out):
out.write(b'"')
for ch in self.data:
if ch == '"':
out.write(b'\\')
out.write(ch.encode())
out.write(b'"')
def to_string(self):
out = BytesIO()
self.write(out)
return safe_decode(out.getvalue())
def string_value(self):
return self.data
def __str__(self):
return safe_decode(self.to_string())
def __repr__(self):
s = self.__str__()
s = s.replace('\n', '\\n')
return s
def __getitem__(self, *args):
return self.data.__getitem__(*args)
<commit_msg>Make KQMLString subclass of KQMLObject.<commit_after>from io import BytesIO
from kqml import KQMLObject
from .util import safe_decode
class KQMLString(KQMLObject):
    """A KQML string literal wrapping a piece of decoded text."""

    def __init__(self, data=None):
        """Store *data* as decoded text; ``None`` becomes the empty string."""
        self.data = '' if data is None else safe_decode(data)

    def __len__(self):
        return len(self.data)

    def char_at(self, n):
        """Return the character at index *n*."""
        return self.data[n]

    def equals(self, obj):
        """Return True iff *obj* is a KQMLString holding identical data."""
        return isinstance(obj, KQMLString) and obj.data == self.data

    def write(self, out):
        """Write the double-quoted representation to the byte stream *out*.

        Embedded double quotes are backslash-escaped; each character is
        encoded individually so the output is bytes.
        """
        out.write(b'"')
        for ch in self.data:
            if ch == '"':
                out.write(b'\\')
            out.write(ch.encode())
        out.write(b'"')

    def to_string(self):
        """Return the quoted representation as decoded text."""
        buf = BytesIO()
        self.write(buf)
        return safe_decode(buf.getvalue())

    def string_value(self):
        """Return the raw (unquoted) text."""
        return self.data

    def __str__(self):
        return safe_decode(self.to_string())

    def __repr__(self):
        # Keep the repr on one line by escaping newlines.
        return self.__str__().replace('\n', '\\n')

    def __getitem__(self, *args):
        return self.data.__getitem__(*args)
|
736d4b8207cb63bda86f4338c08cc8719416fbe7 | gauge/__init__.py | gauge/__init__.py | # -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
| # -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
try:
import __pypy__
except ImportError:
pass
else:
# Here's a workaround which makes :class:`Gauge` be picklable in PyPy.
# ``Gauge.__module__`` and ``__name__`` should be ``'gauge.core'`` and
# ``'Gauge'``. But in PyPy, unlike CPython, they are ``'gauge'`` and
# ``'core.Gauge'``.
locals()['core.Gauge'] = Gauge
del __pypy__
| Fix pickle failure in PyPy | Fix pickle failure in PyPy
| Python | bsd-3-clause | what-studio/gauge | # -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
Fix pickle failure in PyPy | # -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
try:
import __pypy__
except ImportError:
pass
else:
# Here's a workaround which makes :class:`Gauge` be picklable in PyPy.
# ``Gauge.__module__`` and ``__name__`` should be ``'gauge.core'`` and
# ``'Gauge'``. But in PyPy, unlike CPython, they are ``'gauge'`` and
# ``'core.Gauge'``.
locals()['core.Gauge'] = Gauge
del __pypy__
| <commit_before># -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
<commit_msg>Fix pickle failure in PyPy<commit_after> | # -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
try:
import __pypy__
except ImportError:
pass
else:
# Here's a workaround which makes :class:`Gauge` be picklable in PyPy.
# ``Gauge.__module__`` and ``__name__`` should be ``'gauge.core'`` and
# ``'Gauge'``. But in PyPy, unlike CPython, they are ``'gauge'`` and
# ``'core.Gauge'``.
locals()['core.Gauge'] = Gauge
del __pypy__
| # -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
Fix pickle failure in PyPy# -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
try:
import __pypy__
except ImportError:
pass
else:
# Here's a workaround which makes :class:`Gauge` be picklable in PyPy.
# ``Gauge.__module__`` and ``__name__`` should be ``'gauge.core'`` and
# ``'Gauge'``. But in PyPy, unlike CPython, they are ``'gauge'`` and
# ``'core.Gauge'``.
locals()['core.Gauge'] = Gauge
del __pypy__
| <commit_before># -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
<commit_msg>Fix pickle failure in PyPy<commit_after># -*- coding: utf-8 -*-
"""
gauge
~~~~~
Deterministic linear gauge library.
:copyright: (c) 2013-2017 by What! Studio
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gauge.__about__ import __version__ # noqa
from gauge.constants import CLAMP, ERROR, OK, ONCE
from gauge.core import Gauge, Momentum
__all__ = ['Gauge', 'Momentum', 'ERROR', 'OK', 'ONCE', 'CLAMP']
try:
import __pypy__
except ImportError:
pass
else:
# Here's a workaround which makes :class:`Gauge` be picklable in PyPy.
# ``Gauge.__module__`` and ``__name__`` should be ``'gauge.core'`` and
# ``'Gauge'``. But in PyPy, unlike CPython, they are ``'gauge'`` and
# ``'core.Gauge'``.
locals()['core.Gauge'] = Gauge
del __pypy__
|
cfcd3aa71001f74915a938aa0ec1ae58c4db3e06 | src/oscar_accounts/__init__.py | src/oscar_accounts/__init__.py | # Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
| import os
# Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
| Undo removing `import os` statement | Undo removing `import os` statement
| Python | bsd-3-clause | django-oscar/django-oscar-accounts,django-oscar/django-oscar-accounts | # Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
Undo removing `import os` statement | import os
# Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
| <commit_before># Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
<commit_msg>Undo removing `import os` statement<commit_after> | import os
# Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
| # Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
Undo removing `import os` statementimport os
# Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
| <commit_before># Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
<commit_msg>Undo removing `import os` statement<commit_after>import os
# Setting for template directory not found by app_directories.Loader. This
# allows templates to be identified by two paths which enables a template to be
# extended by a template with the same identifier.
TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'templates/accounts')
default_app_config = 'oscar_accounts.config.OscarAccountsConfig'
|
9a239c993502e3f317edd478a5d8b5f225c24b18 | globus_cli/run.py | globus_cli/run.py | from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| Fix doubled-help on main command | Fix doubled-help on main command
Main command was being doubly decorated with the common options. As a
result, it had funky looking helptext.
| Python | apache-2.0 | globus/globus-cli,globus/globus-cli | from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
Fix doubled-help on main command
Main command was being doubly decorated with the common options. As a
result, it had funky looking helptext. | from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| <commit_before>from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
<commit_msg>Fix doubled-help on main command
Main command was being doubly decorated with the common options. As a
result, it had funky looking helptext.<commit_after> | from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
Fix doubled-help on main command
Main command was being doubly decorated with the common options. As a
result, it had funky looking helptext.from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| <commit_before>from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
<commit_msg>Fix doubled-help on main command
Main command was being doubly decorated with the common options. As a
result, it had funky looking helptext.<commit_after>from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
|
d406aa60f31f5e318a46e84539546a4452574ce6 | src/rtruffle/source_section.py | src/rtruffle/source_section.py | class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = None, file = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
| class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = 0, file_name = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file_name
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
| Initialize char_length with a number | Initialize char_length with a number
And make var name `file` less ambiguous by using `file_name` instead.
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
| Python | mit | SOM-st/RPySOM,smarr/RTruffleSOM,SOM-st/RTruffleSOM,smarr/PySOM,SOM-st/PySOM,SOM-st/RPySOM,smarr/RTruffleSOM,SOM-st/RTruffleSOM,smarr/PySOM,SOM-st/PySOM | class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = None, file = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
Initialize char_length with a number
And make var name `file` less ambiguous by using `file_name` instead.
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de> | class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = 0, file_name = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file_name
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
| <commit_before>class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = None, file = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
<commit_msg>Initialize char_length with a number
And make var name `file` less ambiguous by using `file_name` instead.
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after> | class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = 0, file_name = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file_name
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
| class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = None, file = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
Initialize char_length with a number
And make var name `file` less ambiguous by using `file_name` instead.
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = 0, file_name = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file_name
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
| <commit_before>class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = None, file = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
<commit_msg>Initialize char_length with a number
And make var name `file` less ambiguous by using `file_name` instead.
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>class SourceCoordinate(object):
_immutable_fields_ = ['_start_line', '_start_column', '_char_idx']
def __init__(self, start_line, start_column, char_idx):
self._start_line = start_line
self._start_column = start_column
self._char_idx = char_idx
def get_start_line(self):
return self._start_line
def get_start_column(self):
return self._start_column
class SourceSection(object):
_immutable_fields_ = ['_source', '_identifier', '_coord', '_char_length']
def __init__(self, source = None, identifier = None, coord = None,
char_length = 0, file_name = None, source_section = None):
if source_section:
self._source = source_section._source
self._coord = source_section._coord
self._char_length = source_section._char_length
self._file = source_section._file
else:
self._source = source
self._coord = coord
self._char_length = char_length
self._file = file_name
self._identifier = identifier
def __str__(self):
return "%s:%d:%d" % (self._file, self._coord.get_start_line(),
self._coord.get_start_column())
|
321a0cea6a71e29a3f00116c52c1056d7dcfef7e | daskfunk/__init__.py | daskfunk/__init__.py | from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
| from __future__ import absolute_import, division, print_function
from .core import compile
| Remove import of deleted file | Remove import of deleted file
| Python | mit | Savvysherpa/dask-funk | from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
Remove import of deleted file | from __future__ import absolute_import, division, print_function
from .core import compile
| <commit_before>from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
<commit_msg>Remove import of deleted file<commit_after> | from __future__ import absolute_import, division, print_function
from .core import compile
| from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
Remove import of deleted filefrom __future__ import absolute_import, division, print_function
from .core import compile
| <commit_before>from __future__ import absolute_import, division, print_function
from .core import compile
from ._info import __version__
<commit_msg>Remove import of deleted file<commit_after>from __future__ import absolute_import, division, print_function
from .core import compile
|
aff8945aef3f10fa9d1243b25301e84611c27422 | aleph/views/status_api.py | aleph/views/status_api.py | import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
for collection_id in collections:
resolver.queue(request, Collection, collection_id)
resolver.resolve(request)
results = []
for collection_id in collections:
data = resolver.get(request, Collection, collection_id)
if data is None:
continue
fid = data['foreign_id']
if fid in active_foreign_ids:
result = active_collections[fid]
result['collection'] = data
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
| import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
results = []
for fid in active_foreign_ids:
collection = Collection.by_foreign_id(fid)
if collection is None:
continue
if collection.id in collections:
result = active_collections[fid]
result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
| Load only the active collections instead of all accessible collections | Load only the active collections instead of all accessible collections
| Python | mit | alephdata/aleph,alephdata/aleph,pudo/aleph,alephdata/aleph,pudo/aleph,pudo/aleph,alephdata/aleph,alephdata/aleph | import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
for collection_id in collections:
resolver.queue(request, Collection, collection_id)
resolver.resolve(request)
results = []
for collection_id in collections:
data = resolver.get(request, Collection, collection_id)
if data is None:
continue
fid = data['foreign_id']
if fid in active_foreign_ids:
result = active_collections[fid]
result['collection'] = data
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
Load only the active collections instead of all accessible collections | import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
results = []
for fid in active_foreign_ids:
collection = Collection.by_foreign_id(fid)
if collection is None:
continue
if collection.id in collections:
result = active_collections[fid]
result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
| <commit_before>import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
for collection_id in collections:
resolver.queue(request, Collection, collection_id)
resolver.resolve(request)
results = []
for collection_id in collections:
data = resolver.get(request, Collection, collection_id)
if data is None:
continue
fid = data['foreign_id']
if fid in active_foreign_ids:
result = active_collections[fid]
result['collection'] = data
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
<commit_msg>Load only the active collections instead of all accessible collections<commit_after> | import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
results = []
for fid in active_foreign_ids:
collection = Collection.by_foreign_id(fid)
if collection is None:
continue
if collection.id in collections:
result = active_collections[fid]
result['collection'] = collection.to_dict()
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
| import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
require(request.authz.logged_in)
status = get_active_collection_status()
active_collections = status.pop('datasets')
active_foreign_ids = set(active_collections.keys())
collections = request.authz.collections(request.authz.READ)
for collection_id in collections:
resolver.queue(request, Collection, collection_id)
resolver.resolve(request)
results = []
for collection_id in collections:
data = resolver.get(request, Collection, collection_id)
if data is None:
continue
fid = data['foreign_id']
if fid in active_foreign_ids:
result = active_collections[fid]
result['collection'] = data
result['id'] = fid
results.append(result)
status['results'] = results
return jsonify(status)
Load only the active collections instead of all accessible collectionsimport logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
    """Return processing status for active collections readable by the user.

    Looks up each active dataset by foreign_id and includes it only when the
    matching collection is within the user's READ authorization.
    """
    require(request.authz.logged_in)
    payload = get_active_collection_status()
    datasets = payload.pop('datasets')
    readable = request.authz.collections(request.authz.READ)
    matched = []
    # One DB lookup per active dataset; skip unknown or unreadable ones.
    for fid in set(datasets.keys()):
        coll = Collection.by_foreign_id(fid)
        if coll is None or coll.id not in readable:
            continue
        entry = datasets[fid]
        entry['collection'] = coll.to_dict()
        entry['id'] = fid
        matched.append(entry)
    payload['results'] = matched
    return jsonify(payload)
<commit_before>import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.logic import resolver
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
    """Report processing status for the collections this user may read.

    All readable collections are queued and resolved in a single batch;
    results are limited to collections present in the active-dataset map.
    """
    require(request.authz.logged_in)
    payload = get_active_collection_status()
    datasets = payload.pop('datasets')
    active_ids = set(datasets.keys())
    readable = request.authz.collections(request.authz.READ)
    # Pre-queue everything so resolver.resolve() does one batched fetch.
    for cid in readable:
        resolver.queue(request, Collection, cid)
    resolver.resolve(request)
    matched = []
    for cid in readable:
        data = resolver.get(request, Collection, cid)
        if data is None:
            continue
        fid = data['foreign_id']
        if fid not in active_ids:
            continue
        entry = datasets[fid]
        entry['collection'] = data
        entry['id'] = fid
        matched.append(entry)
    payload['results'] = matched
    return jsonify(payload)
<commit_msg>Load only the active collections instead of all accessible collections<commit_after>import logging
from flask import Blueprint, request
from aleph.model import Collection
from aleph.queues import get_active_collection_status
from aleph.views.util import jsonify
from aleph.views.util import require
log = logging.getLogger(__name__)
blueprint = Blueprint('status_api', __name__)
@blueprint.route('/api/2/status', methods=['GET'])
def status():
    """Report processing status for active, user-readable collections.

    Resolves each active foreign_id to a Collection row and keeps it only
    if its id falls inside the user's READ-authorized collection set.
    """
    require(request.authz.logged_in)
    payload = get_active_collection_status()
    datasets = payload.pop('datasets')
    readable = request.authz.collections(request.authz.READ)
    matched = []
    # Iterate the active datasets; drop unknown or unauthorized collections.
    for fid in set(datasets.keys()):
        coll = Collection.by_foreign_id(fid)
        if coll is None or coll.id not in readable:
            continue
        entry = datasets[fid]
        entry['collection'] = coll.to_dict()
        entry['id'] = fid
        matched.append(entry)
    payload['results'] = matched
    return jsonify(payload)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.