commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos
|---|---|---|---|---|---|---|---|---|---|
abf91ac218c2386a2366eae243a15b1215f47832 | teuthology/task/tests/test_run.py | teuthology/task/tests/test_run.py | import logging
import pytest
from io import StringIO
from teuthology.exceptions import CommandFailedError
log = logging.getLogger(__name__)
class TestRun(object):
"""
Tests to see if we can make remote procedure calls to the current cluster
"""
def test_command_failed_label(self, ctx, config):
result = ""
try:
ctx.cluster.run(
args=["python", "-c", "assert False"],
label="working as expected, nothing to see here"
)
except CommandFailedError as e:
result = str(e)
assert "working as expected" in result
def test_command_failed_no_label(self, ctx, config):
with pytest.raises(CommandFailedError):
ctx.cluster.run(
args=["python", "-c", "assert False"],
)
def test_command_success(self, ctx, config):
result = StringIO()
ctx.cluster.run(
args=["python", "-c", "print('hi')"],
stdout=result
)
assert result.getvalue().strip() == "hi"
| import logging
import pytest
from io import StringIO
from teuthology.exceptions import CommandFailedError
log = logging.getLogger(__name__)
class TestRun(object):
"""
Tests to see if we can make remote procedure calls to the current cluster
"""
def test_command_failed_label(self, ctx, config):
result = ""
try:
ctx.cluster.run(
args=["python3", "-c", "assert False"],
label="working as expected, nothing to see here"
)
except CommandFailedError as e:
result = str(e)
assert "working as expected" in result
def test_command_failed_no_label(self, ctx, config):
with pytest.raises(CommandFailedError):
ctx.cluster.run(
args=["python3", "-c", "assert False"],
)
def test_command_success(self, ctx, config):
result = StringIO()
ctx.cluster.run(
args=["python3", "-c", "print('hi')"],
stdout=result
)
assert result.getvalue().strip() == "hi"
| Fix reference to python binary | task.tests: Fix reference to python binary
It was trying to use `python` as opposed to `python3`.
Signed-off-by: Zack Cerza <d7cdf09fc0f0426e98c9978ee42da5d61fa54986@redhat.com>
| Python | mit | ktdreyer/teuthology,ceph/teuthology,ceph/teuthology,ktdreyer/teuthology |
3d980016ad5fd65bb167d2f44a83c78e52ebb7b5 | applications/plugins/SofaPython/python/SofaPython/PythonAdvancedTimer.py | applications/plugins/SofaPython/python/SofaPython/PythonAdvancedTimer.py | import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutPutType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
| import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutputType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
| FIX crash in python script when visualizing advanced timer output | [SofaPython] FIX crash in python script when visualizing advanced timer output
| Python | lgpl-2.1 | FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa,FabienPean/sofa | import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutPutType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
[SofaPython] FIX crash in python script when visualizing advanced timer output | import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutputType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
| <commit_before>import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutPutType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
<commit_msg>[SofaPython] FIX crash in python script when visualizing advanced timer output<commit_after> | import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutputType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
| import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutPutType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
[SofaPython] FIX crash in python script when visualizing advanced timer outputimport os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutputType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
| <commit_before>import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutPutType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
<commit_msg>[SofaPython] FIX crash in python script when visualizing advanced timer output<commit_after>import os
import sys
import Sofa
# ploting
import matplotlib.pyplot as plt
# JSON deconding
from collections import OrderedDict
import json
# argument parser: usage via the command line
import argparse
def measureAnimationTime(node, timerName, timerInterval, timerOutputType, resultFileName, simulationDeltaTime, iterations):
# timer
Sofa.timerSetInterval(timerName, timerInterval) # Set the number of steps neded to compute the timer
Sofa.timerSetEnabled(timerName, True)
resultFileName = resultFileName + ".log"
rootNode = node.getRoot()
with open(resultFileName, "w+") as outputFile :
outputFile.write("{")
i = 0
Sofa.timerSetOutputType(timerName, timerOutputType)
while i < iterations:
Sofa.timerBegin(timerName)
rootNode.simulationStep(simulationDeltaTime)
result = Sofa.timerEnd(timerName, rootNode)
if result != None :
outputFile.write(result + ",")
oldResult = result
i = i+1
last_pose = outputFile.tell()
outputFile.seek(last_pose - 1)
outputFile.write("\n}")
outputFile.seek(7)
firstStep = outputFile.read(1)
outputFile.close()
Sofa.timerSetEnabled(timerName, 0)
print "[Scene info]: end of simulation."
return 0
|
ffc1b8c83e32f4c2b5454a0ae71b9c30cc8e7596 | toolz/tests/test_serialization.py | toolz/tests/test_serialization.py | from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
| from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
| Add serialization test for `complement` | Add serialization test for `complement`
| Python | bsd-3-clause | pombredanne/toolz,simudream/toolz,machinelearningdeveloper/toolz,quantopian/toolz,jdmcbr/toolz,bartvm/toolz,jcrist/toolz,cpcloud/toolz,pombredanne/toolz,quantopian/toolz,simudream/toolz,machinelearningdeveloper/toolz,bartvm/toolz,llllllllll/toolz,jdmcbr/toolz,llllllllll/toolz,cpcloud/toolz,jcrist/toolz | from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
Add serialization test for `complement` | from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
| <commit_before>from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
<commit_msg>Add serialization test for `complement`<commit_after> | from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
| from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
Add serialization test for `complement`from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
| <commit_before>from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
<commit_msg>Add serialization test for `complement`<commit_after>from toolz import *
import pickle
def test_compose():
f = compose(str, sum)
g = pickle.loads(pickle.dumps(f))
assert f((1, 2)) == g((1, 2))
def test_curry():
f = curry(map)(str)
g = pickle.loads(pickle.dumps(f))
assert list(f((1, 2, 3))) == list(g((1, 2, 3)))
def test_juxt():
f = juxt(str, int, bool)
g = pickle.loads(pickle.dumps(f))
assert f(1) == g(1)
assert f.funcs == g.funcs
def test_complement():
f = complement(bool)
assert f(True) is False
assert f(False) is True
g = pickle.loads(pickle.dumps(f))
assert f(True) == g(True)
assert f(False) == g(False)
|
7318e3f1a6169ed7b708d6f6f09816f1ff88419a | printer.py | printer.py | #!/usr/bin/env python2
from PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
| #!/usr/bin/env python3
from src.PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
| Load the right file for PrinterApplication | Load the right file for PrinterApplication
| Python | agpl-3.0 | markwal/Cura,totalretribution/Cura,quillford/Cura,bq/Ultimaker-Cura,DeskboxBrazil/Cura,totalretribution/Cura,quillford/Cura,ynotstartups/Wanhao,Curahelper/Cura,hmflash/Cura,derekhe/Cura,derekhe/Cura,hmflash/Cura,fxtentacle/Cura,fieldOfView/Cura,fxtentacle/Cura,lo0ol/Ultimaker-Cura,ynotstartups/Wanhao,ad1217/Cura,fieldOfView/Cura,bq/Ultimaker-Cura,DeskboxBrazil/Cura,senttech/Cura,Curahelper/Cura,ad1217/Cura,senttech/Cura,lo0ol/Ultimaker-Cura,markwal/Cura | #!/usr/bin/env python2
from PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
Load the right file for PrinterApplication | #!/usr/bin/env python3
from src.PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
| <commit_before>#!/usr/bin/env python2
from PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
<commit_msg>Load the right file for PrinterApplication<commit_after> | #!/usr/bin/env python3
from src.PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
| #!/usr/bin/env python2
from PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
Load the right file for PrinterApplication#!/usr/bin/env python3
from src.PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
| <commit_before>#!/usr/bin/env python2
from PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
<commit_msg>Load the right file for PrinterApplication<commit_after>#!/usr/bin/env python3
from src.PrinterApplication import PrinterApplication
app = PrinterApplication.getInstance()
app.run()
|
f04ccb741ea059aed8891f647ff19b26172ba61c | src/tvmaze/parsers/__init__.py | src/tvmaze/parsers/__init__.py | """Parse data from TVMaze."""
import datetime
import typing
def parse_date(
val: typing.Optional[str],
) -> typing.Optional[datetime.date]:
"""
Parse date from TVMaze API.
:param val: A date string
:return: A datetime.date object
"""
fmt = '%Y-%m-%d'
try:
return datetime.datetime.strptime(val, fmt).date()
except TypeError:
if val is not None:
raise
def parse_duration(
val: int,
) -> datetime.timedelta:
"""
Parse duration from TVMaze API.
:param val: A duration in minutes
:return: A datetime.timedelta object
"""
return datetime.timedelta(minutes=val)
def parse_time(
val: str,
) -> typing.Optional[datetime.time]:
"""
Parse time from TVMaze API.
:param val: A time string
:return: A datetime.time object
"""
fmt = '%H:%M'
try:
return datetime.datetime.strptime(val, fmt).time()
except ValueError:
if val != '':
raise
| """Parse data from TVMaze."""
import datetime
import typing
def parse_date(
val: typing.Optional[str],
) -> typing.Optional[datetime.date]:
"""
Parse date from TVMaze API.
:param val: A date string
:return: A datetime.date object
"""
fmt = '%Y-%m-%d'
try:
return datetime.datetime.strptime(val, fmt).date()
except TypeError:
if val is not None:
raise
def parse_duration(
val: typing.Optional[int],
) -> typing.Optional[datetime.timedelta]:
"""
Parse duration from TVMaze API.
:param val: A duration in minutes
:return: A datetime.timedelta object
"""
try:
return datetime.timedelta(minutes=val)
except TypeError:
if val is not None:
raise
def parse_time(
val: str,
) -> typing.Optional[datetime.time]:
"""
Parse time from TVMaze API.
:param val: A time string
:return: A datetime.time object
"""
fmt = '%H:%M'
try:
return datetime.datetime.strptime(val, fmt).time()
except ValueError:
if val != '':
raise
| Fix parsing duration when duration is None | Fix parsing duration when duration is None
Fixes tvmaze/tvmaze#14
| Python | mit | tvmaze/tvmaze |
a50cca78f400077d56b328a20661c1a9d1e2aff4 | app/tests/test_generate_profiles.py | app/tests/test_generate_profiles.py | import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
| import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
| Add a requirement for serving the assets in all tests | Add a requirement for serving the assets in all tests
| Python | mit | nyodas/enjoliver,kirek007/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,JulienBalestra/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver | import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
Add a requirement for serving the assets in all tests | import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
| <commit_before>import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
<commit_msg>Add a requirement for serving the assets in all tests<commit_after> | import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
| import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
Add a requirement for serving the assets in all testsimport os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
| <commit_before>import os
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
<commit_msg>Add a requirement for serving the assets in all tests<commit_after>import os
import subprocess
from unittest import TestCase
import re
from app import generate_profiles
class TestGenerateProfiles(TestCase):
gen = generate_profiles.GenerateProfiles
network_environment = "%s/misc/network-environment" % gen.bootcfg_path
@classmethod
def setUpClass(cls):
subprocess.check_output(["make", "-C", cls.gen.project_path])
cls.gen = generate_profiles.GenerateProfiles()
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
@classmethod
def tearDownClass(cls):
if os.path.isfile("%s" % cls.network_environment):
os.remove("%s" % cls.network_environment)
def test_00_ip_address(self):
self.assertFalse(os.path.isfile("%s" % self.network_environment))
ip = self.gen.ip_address
match = re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)
self.assertIsNotNone(match)
self.assertTrue(os.path.isfile("%s" % self.network_environment))
|
9cc39104b96a197a1f42667964f32f9671b5125f | ch01/sin_graph.py | ch01/sin_graph.py | # coding: utf-8
import numpy as np
import matplotlib.pyplot as plt
# Create the data
x = np.arange(0, 7, 0.1)
y = np.sin(x)
# Draw the graph
plt.plot(x, y)
plt.show() | # coding: utf-8
import numpy as np
import matplotlib.pyplot as plt
# Create the data
x = np.arange(0, 6, 0.1)
y = np.sin(x)
# Draw the graph
plt.plot(x, y)
plt.show()
| Modify np.arange from 7 to 6 | Modify np.arange from 7 to 6 | Python | mit | kgsn1763/deep-learning-from-scratch,oreilly-japan/deep-learning-from-scratch |
44db9de83aad25a1302ac4c31450a525c0095583 | binobj/__init__.py | binobj/__init__.py | """
binobj
======
A Python library for reading and writing structured binary data.
"""
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
| """
binobj
======
A Python library for reading and writing structured binary data.
"""
# pylint: disable=wildcard-import,unused-import
from .errors import *
from .fields import *
from .serialization import *
from .structures import *
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
| Add wildcard imports at root. | Add wildcard imports at root.
| Python | bsd-3-clause | dargueta/binobj | """
binobj
======
A Python library for reading and writing structured binary data.
"""
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
Add wildcard imports at root. | """
binobj
======
A Python library for reading and writing structured binary data.
"""
# pylint: disable=wildcard-import,unused-import
from .errors import *
from .fields import *
from .serialization import *
from .structures import *
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
| <commit_before>"""
binobj
======
A Python library for reading and writing structured binary data.
"""
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
<commit_msg>Add wildcard imports at root.<commit_after> | """
binobj
======
A Python library for reading and writing structured binary data.
"""
# pylint: disable=wildcard-import,unused-import
from .errors import *
from .fields import *
from .serialization import *
from .structures import *
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
| """
binobj
======
A Python library for reading and writing structured binary data.
"""
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
Add wildcard imports at root."""
binobj
======
A Python library for reading and writing structured binary data.
"""
# pylint: disable=wildcard-import,unused-import
from .errors import *
from .fields import *
from .serialization import *
from .structures import *
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
| <commit_before>"""
binobj
======
A Python library for reading and writing structured binary data.
"""
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
<commit_msg>Add wildcard imports at root.<commit_after>"""
binobj
======
A Python library for reading and writing structured binary data.
"""
# pylint: disable=wildcard-import,unused-import
from .errors import *
from .fields import *
from .serialization import *
from .structures import *
__version_info__ = (0, 1, 0)
__version__ = '.'.join(str(v) for v in __version_info__)
|
98190f0e96b2e2880e81b4801ebd5b04c1e9f1d8 | geomdl/__init__.py | geomdl/__init__.py | """ This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluation these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are;
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
| """ This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluation these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are;
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered as a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
| Fix importing * (star) from package | Fix importing * (star) from package
| Python | mit | orbingol/NURBS-Python,orbingol/NURBS-Python | """ This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluation these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are;
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
Fix importing * (star) from package | """ This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluating these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are:
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
| <commit_before>""" This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluating these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are:
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
<commit_msg>Fix importing * (star) from package<commit_after> | """ This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluating these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are:
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
| """ This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluation these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are;
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
Fix importing * (star) from package""" This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluating these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are:
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
| <commit_before>""" This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluating these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are:
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
<commit_msg>Fix importing * (star) from package<commit_after>""" This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluating these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are:
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
|
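For context on the __all__ fix in the row above: a star import consults the package's __all__ list and binds each named attribute, so entries are normally plain top-level names. A minimal runnable sketch of that mechanism (the demo module below is hypothetical, not part of the dataset row):
# star_import_demo.py - simulates what "from demo import *" binds (sketch).
import types
demo = types.ModuleType("demo")
demo.Curve = type("Curve", (), {})   # stand-ins for exported classes
demo.Surface = type("Surface", (), {})
demo.__all__ = ["Curve", "Surface"]  # names exposed to star imports
# The import machinery looks each __all__ entry up with getattr():
exported = {name: getattr(demo, name) for name in demo.__all__}
print(sorted(exported))              # ['Curve', 'Surface']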
44798dff0992d1c4e62bea97d4deaee1eed657e7 | docs/conf.py | docs/conf.py | #!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {"**": ["sidebarlogo.html", "globaltoc.html", "searchbox.html"]}
htmlhelp_basename = "dependenciesdoc"
| #!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": ["sidebarlogo.html", "globaltoc.html", "relations.html", "searchbox.html"]
}
html_theme_options = {"show_related": True}
| Enable prev/next links in the docs. | Enable prev/next links in the docs.
| Python | bsd-2-clause | proofit404/dependencies,proofit404/dependencies,proofit404/dependencies,proofit404/dependencies | #!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {"**": ["sidebarlogo.html", "globaltoc.html", "searchbox.html"]}
htmlhelp_basename = "dependenciesdoc"
Enable prev/next links in the docs. | #!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": ["sidebarlogo.html", "globaltoc.html", "relations.html", "searchbox.html"]
}
html_theme_options = {"show_related": True}
| <commit_before>#!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {"**": ["sidebarlogo.html", "globaltoc.html", "searchbox.html"]}
htmlhelp_basename = "dependenciesdoc"
<commit_msg>Enable prev/next links in the docs.<commit_after> | #!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": ["sidebarlogo.html", "globaltoc.html", "relations.html", "searchbox.html"]
}
html_theme_options = {"show_related": True}
| #!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {"**": ["sidebarlogo.html", "globaltoc.html", "searchbox.html"]}
htmlhelp_basename = "dependenciesdoc"
Enable prev/next links in the docs.#!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": ["sidebarlogo.html", "globaltoc.html", "relations.html", "searchbox.html"]
}
html_theme_options = {"show_related": True}
| <commit_before>#!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {"**": ["sidebarlogo.html", "globaltoc.html", "searchbox.html"]}
htmlhelp_basename = "dependenciesdoc"
<commit_msg>Enable prev/next links in the docs.<commit_after>#!/usr/bin/env python3
templates_path = ["templates"]
source_suffix = ".rst"
master_doc = "index"
project = "dependencies"
copyright = "2016-2018, Artem Malyshev"
author = "Artem Malyshev"
version = "0.14"
release = "0.14"
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = "alabaster"
html_static_path = ["static"]
html_sidebars = {
"**": ["sidebarlogo.html", "globaltoc.html", "relations.html", "searchbox.html"]
}
html_theme_options = {"show_related": True}
|
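A note on the sidebar change in the row above: html_sidebars maps glob patterns to template lists, and adding relations.html is what renders the previous/next links that alabaster's show_related option enables. A loose, illustrative sketch of the per-pattern lookup (page names and values are hypothetical; Sphinx's real matching is more involved):
# sidebars_demo.py - rough model of per-pattern sidebar selection (sketch).
import fnmatch
html_sidebars = {
    "index": ["globaltoc.html", "searchbox.html"],                 # landing page
    "**": ["globaltoc.html", "relations.html", "searchbox.html"],  # everything else
}
def sidebars_for(pagename):
    for pattern, templates in html_sidebars.items():
        if pattern != "**" and fnmatch.fnmatch(pagename, pattern):
            return templates
    return html_sidebars["**"]
print(sidebars_for("index"))      # ['globaltoc.html', 'searchbox.html']
print(sidebars_for("api/usage"))  # ['globaltoc.html', 'relations.html', 'searchbox.html']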
01f9649c9a661f1bf7289d3e6ea585b00ed48af3 | docs/conf.py | docs/conf.py | import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
| import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
latex_elements = {
'papersize': 'a4paper',
'classoptions': ',openany,oneside',
'babel': '\\usepackage[english]{babel}',
'preamble': '\\usepackage{microtype}',
}
latex_documents = [
('index', 'openproviderpy.tex', u'openprovider.py Documentation',
u'Antagonist B.V.', 'manual'),
]
| Enable LaTeX output for docs | Enable LaTeX output for docs
| Python | mit | AntagonistHQ/openprovider.py | import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
Enable LaTeX output for docs | import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
latex_elements = {
'papersize': 'a4paper',
'classoptions': ',openany,oneside',
'babel': '\\usepackage[english]{babel}',
'preamble': '\\usepackage{microtype}',
}
latex_documents = [
('index', 'openproviderpy.tex', u'openprovider.py Documentation',
u'Antagonist B.V.', 'manual'),
]
| <commit_before>import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
<commit_msg>Enable LaTeX output for docs<commit_after> | import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
latex_elements = {
'papersize': 'a4paper',
'classoptions': ',openany,oneside',
'babel': '\\usepackage[english]{babel}',
'preamble': '\\usepackage{microtype}',
}
latex_documents = [
('index', 'openproviderpy.tex', u'openprovider.py Documentation',
u'Antagonist B.V.', 'manual'),
]
| import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
Enable LaTeX output for docsimport sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
latex_elements = {
'papersize': 'a4paper',
'classoptions': ',openany,oneside',
'babel': '\\usepackage[english]{babel}',
'preamble': '\\usepackage{microtype}',
}
latex_documents = [
('index', 'openproviderpy.tex', u'openprovider.py Documentation',
u'Antagonist B.V.', 'manual'),
]
| <commit_before>import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
<commit_msg>Enable LaTeX output for docs<commit_after>import sys
import os
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
master_doc = 'index'
project = u'openprovider.py'
copyright = u'2014, Antagonist B.V'
version = '0.0.1'
release = '0.0.1'
html_static_path = ['_static']
templates_path = ['_templates']
source_suffix = '.rst'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
htmlhelp_basename = 'openproviderpydoc'
latex_elements = {
'papersize': 'a4paper',
'classoptions': ',openany,oneside',
'babel': '\\usepackage[english]{babel}',
'preamble': '\\usepackage{microtype}',
}
latex_documents = [
('index', 'openproviderpy.tex', u'openprovider.py Documentation',
u'Antagonist B.V.', 'manual'),
]
|
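One sharp edge in the preamble entry of the row above: '\usepackage{...}' is not a valid Python 3 string literal, because \u opens a unicode escape, so LaTeX snippets need doubled backslashes or raw strings. A minimal sketch:
# latex_strings_demo.py - escaping LaTeX preamble strings safely (sketch).
babel = '\\usepackage[english]{babel}'  # doubled backslash
preamble = r'\usepackage{microtype}'    # raw string, same resulting text
assert babel[0] == preamble[0] == '\\'
print(babel)
print(preamble)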
f1f6848557428e9b2fc39c6b0d476279a0f5dd5c | docs/conf.py | docs/conf.py | import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| Define date in docs dynamically | Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
| Python | bsd-3-clause | pymanopt/pymanopt,pymanopt/pymanopt | import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com> | import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| <commit_before>import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
<commit_msg>Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after> | import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| <commit_before>import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
<commit_msg>Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
|
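The pattern in the row above generalizes: derive the copyright year range at build time rather than hard-coding it. A small sketch (the start year and author below are placeholders, not values from the dataset):
# copyright_demo.py - compute a docs copyright line at build time (sketch).
import datetime
START_YEAR = 2016    # placeholder project start year
AUTHOR = "Jane Doe"  # placeholder author string
year = datetime.date.today().year
copyright_line = f"{START_YEAR}-{year}, {AUTHOR}" if year > START_YEAR else f"{START_YEAR}, {AUTHOR}"
print(copyright_line)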
7abecbcd949278eec4082b733c5d687ba8bf11d4 | random-object-id.py | random-object-id.py | import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId({})'.format(object_id)
else:
print object_id
| import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId("{}")'.format(object_id)
else:
print object_id
| Add quotes to long form output | Add quotes to long form output
| Python | mit | mxr/random-object-id | import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId({})'.format(object_id)
else:
print object_id
Add quotes to long form output | import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId("{}")'.format(object_id)
else:
print object_id
| <commit_before>import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId({})'.format(object_id)
else:
print object_id
<commit_msg>Add quotes to long form output<commit_after> | import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId("{}")'.format(object_id)
else:
print object_id
| import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId({})'.format(object_id)
else:
print object_id
Add quotes to long form outputimport binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId("{}")'.format(object_id)
else:
print object_id
| <commit_before>import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId({})'.format(object_id)
else:
print object_id
<commit_msg>Add quotes to long form output<commit_after>import binascii
import os
import time
from optparse import OptionParser
def gen_random_object_id():
timestamp = '{0:x}'.format(int(time.time()))
rest = binascii.b2a_hex(os.urandom(8))
return timestamp + rest
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-l', '--longform',
action="store_true",
dest="long_form",
help="prints the ID surrounded by ObjectId(...)")
(options, args) = parser.parse_args()
object_id = gen_random_object_id()
if options.long_form:
print 'ObjectId("{}")'.format(object_id)
else:
print object_id
|
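The script in the row above is Python 2 (print statements, and binascii.b2a_hex returning str). A hedged Python 3 rendering of the same idea, an 8-hex-char epoch timestamp plus 8 random bytes, matching the 24 hex characters of a BSON ObjectId:
# gen_object_id_py3.py - Python 3 sketch of the generator above.
import binascii
import os
import time
def gen_random_object_id():
    timestamp = format(int(time.time()), "x")        # 8 hex chars (4-byte epoch)
    rest = binascii.b2a_hex(os.urandom(8)).decode()  # b2a_hex returns bytes in Python 3
    return timestamp + rest                          # 24 hex chars total
print('ObjectId("{}")'.format(gen_random_object_id()))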
0c01cb42527fdc2a094d3cc3f2f99a75da6992fa | geoportailv3/models.py | geoportailv3/models.py | # -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
| # -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
from pyramid.security import Allow, ALL_PERMISSIONS
from formalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy.types import Integer, Boolean, Unicode
from c2cgeoportal.models import AUTHORIZED_ROLE, _schema
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
class LuxLayerInternalWMS(LayerInternalWMS):
__label__ = _(u'Internal WMS layer')
__plural__ = _(u'Internal WMS layers')
__tablename__ = 'lux_layer_internal_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_int_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_internal_wms.id'),
primary_key=True
)
url = Column(Unicode, label=_(u'Url'))
layers = Column(Unicode, label=_(u'Layers'))
class LuxLayerExternalWMS(LayerExternalWMS):
__label__ = _(u'External WMS layer')
__plural__ = _(u'External WMS layers')
__tablename__ = 'lux_layer_external_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_ext_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_external_wms.id'),
primary_key=True
)
category_id = Column(Integer, label=_(u'Category ID'))
is_poi = Column(Boolean, label=_(u'Is a POI'))
collection_id = Column(Integer, label=_(u'Collection ID'))
class LuxRoleTheme(Base):
__label__ = _(u'LuxRoleTheme')
__plural__ = _(u'LuxRoleTheme')
__tablename__ = 'lux_role_theme'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
theme_id = Column(
Integer,
ForeignKey(_schema + '.theme.id'),
primary_key=True
)
role_id = Column(
Integer,
label=_(u'Role ID'),
primary_key=True
)
| Create the model for project-specific tables | Create the model for project-specific tables
| Python | mit | Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,geoportallux/geoportailv3-gisgr,geoportallux/geoportailv3-gisgr,geoportallux/geoportailv3-gisgr,Geoportail-Luxembourg/geoportailv3,geoportallux/geoportailv3-gisgr | # -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
Create the model for project-specific tables | # -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
from pyramid.security import Allow, ALL_PERMISSIONS
from formalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy.types import Integer, Boolean, Unicode
from c2cgeoportal.models import AUTHORIZED_ROLE, _schema
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
class LuxLayerInternalWMS(LayerInternalWMS):
__label__ = _(u'Internal WMS layer')
__plural__ = _(u'Internal WMS layers')
__tablename__ = 'lux_layer_internal_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_int_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_internal_wms.id'),
primary_key=True
)
url = Column(Unicode, label=_(u'Url'))
layers = Column(Unicode, label=_(u'Layers'))
class LuxLayerExternalWMS(LayerExternalWMS):
__label__ = _(u'External WMS layer')
__plural__ = _(u'External WMS layers')
__tablename__ = 'lux_layer_external_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_ext_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_external_wms.id'),
primary_key=True
)
category_id = Column(Integer, label=_(u'Category ID'))
is_poi = Column(Boolean, label=_(u'Is a POI'))
collection_id = Column(Integer, label=_(u'Collection ID'))
class LuxRoleTheme(Base):
__label__ = _(u'LuxRoleTheme')
__plural__ = _(u'LuxRoleTheme')
__tablename__ = 'lux_role_theme'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
theme_id = Column(
Integer,
ForeignKey(_schema + '.theme.id'),
primary_key=True
)
role_id = Column(
Integer,
label=_(u'Role ID'),
primary_key=True
)
| <commit_before># -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
<commit_msg>Create the model for project-specific tables<commit_after> | # -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
from pyramid.security import Allow, ALL_PERMISSIONS
from formalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy.types import Integer, Boolean, Unicode
from c2cgeoportal.models import AUTHORIZED_ROLE, _schema
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
class LuxLayerInternalWMS(LayerInternalWMS):
__label__ = _(u'Internal WMS layer')
__plural__ = _(u'Internal WMS layers')
__tablename__ = 'lux_layer_internal_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_int_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_internal_wms.id'),
primary_key=True
)
url = Column(Unicode, label=_(u'Url'))
layers = Column(Unicode, label=_(u'Layers'))
class LuxLayerExternalWMS(LayerExternalWMS):
__label__ = _(u'External WMS layer')
__plural__ = _(u'External WMS layers')
__tablename__ = 'lux_layer_external_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_ext_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_external_wms.id'),
primary_key=True
)
category_id = Column(Integer, label=_(u'Category ID'))
is_poi = Column(Boolean, label=_(u'Is a POI'))
collection_id = Column(Integer, label=_(u'Collection ID'))
class LuxRoleTheme(Base):
__label__ = _(u'LuxRoleTheme')
__plural__ = _(u'LuxRoleTheme')
__tablename__ = 'lux_role_theme'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
theme_id = Column(
Integer,
ForeignKey(_schema + '.theme.id'),
primary_key=True
)
role_id = Column(
Integer,
label=_(u'Role ID'),
primary_key=True
)
| # -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
Create the model for project-specific tables# -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
from pyramid.security import Allow, ALL_PERMISSIONS
from formalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy.types import Integer, Boolean, Unicode
from c2cgeoportal.models import AUTHORIZED_ROLE, _schema
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
class LuxLayerInternalWMS(LayerInternalWMS):
__label__ = _(u'Internal WMS layer')
__plural__ = _(u'Internal WMS layers')
__tablename__ = 'lux_layer_internal_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_int_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_internal_wms.id'),
primary_key=True
)
url = Column(Unicode, label=_(u'Url'))
layers = Column(Unicode, label=_(u'Layers'))
class LuxLayerExternalWMS(LayerExternalWMS):
__label__ = _(u'External WMS layer')
__plural__ = _(u'External WMS layers')
__tablename__ = 'lux_layer_external_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_ext_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_external_wms.id'),
primary_key=True
)
category_id = Column(Integer, label=_(u'Category ID'))
is_poi = Column(Boolean, label=_(u'Is a POI'))
collection_id = Column(Integer, label=_(u'Collection ID'))
class LuxRoleTheme(Base):
__label__ = _(u'LuxRoleTheme')
__plural__ = _(u'LuxRoleTheme')
__tablename__ = 'lux_role_theme'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
theme_id = Column(
Integer,
ForeignKey(_schema + '.theme.id'),
primary_key=True
)
role_id = Column(
Integer,
label=_(u'Role ID'),
primary_key=True
)
| <commit_before># -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
<commit_msg>Create the model for project-specific tables<commit_after># -*- coding: utf-8 -*-
import logging
from pyramid.i18n import TranslationStringFactory
from c2cgeoportal.models import * # noqa
from pyramid.security import Allow, ALL_PERMISSIONS
from formalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy.types import Integer, Boolean, Unicode
from c2cgeoportal.models import AUTHORIZED_ROLE, _schema
_ = TranslationStringFactory('geoportailv3')
log = logging.getLogger(__name__)
class LuxLayerInternalWMS(LayerInternalWMS):
__label__ = _(u'Internal WMS layer')
__plural__ = _(u'Internal WMS layers')
__tablename__ = 'lux_layer_internal_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_int_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_internal_wms.id'),
primary_key=True
)
url = Column(Unicode, label=_(u'Url'))
layers = Column(Unicode, label=_(u'Layers'))
class LuxLayerExternalWMS(LayerExternalWMS):
__label__ = _(u'External WMS layer')
__plural__ = _(u'External WMS layers')
__tablename__ = 'lux_layer_external_wms'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
__mapper_args__ = {'polymorphic_identity': 'lu_ext_wms'}
id = Column(
Integer,
ForeignKey(_schema + '.layer_external_wms.id'),
primary_key=True
)
category_id = Column(Integer, label=_(u'Category ID'))
is_poi = Column(Boolean, label=_(u'Is a POI'))
collection_id = Column(Integer, label=_(u'Collection ID'))
class LuxRoleTheme(Base):
__label__ = _(u'LuxRoleTheme')
__plural__ = _(u'LuxRoleTheme')
__tablename__ = 'lux_role_theme'
__table_args__ = {'schema': _schema}
__acl__ = [
(Allow, AUTHORIZED_ROLE, ALL_PERMISSIONS),
]
theme_id = Column(
Integer,
ForeignKey(_schema + '.theme.id'),
primary_key=True
)
role_id = Column(
Integer,
label=_(u'Role ID'),
primary_key=True
)
|
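The models in the row above rely on SQLAlchemy joined-table inheritance: each child table shares the parent's primary key and rows are discriminated by polymorphic_identity. A self-contained sketch without the c2cgeoportal/formalchemy layers (table and class names below are illustrative only):
# polymorphic_demo.py - joined-table inheritance sketch (plain SQLAlchemy 1.4+).
from sqlalchemy import Column, ForeignKey, Integer, Unicode, create_engine
from sqlalchemy.orm import Session, declarative_base
Base = declarative_base()
class Layer(Base):
    __tablename__ = "layer"
    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    type = Column(Unicode)
    __mapper_args__ = {"polymorphic_on": type, "polymorphic_identity": "layer"}
class InternalWMS(Layer):
    __tablename__ = "layer_internal_wms"
    id = Column(Integer, ForeignKey("layer.id"), primary_key=True)  # shared PK
    url = Column(Unicode)
    __mapper_args__ = {"polymorphic_identity": "int_wms"}
engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(InternalWMS(name="roads", url="https://example.test/wms"))
    session.commit()
    print([(layer.type, layer.name) for layer in session.query(Layer)])  # [('int_wms', 'roads')]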
c3284516e8dc2c7fccfbf7e4bff46a66b4ad2f15 | cref/evaluation/__init__.py | cref/evaluation/__init__.py | import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
| import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
| Save output for every run | Save output for every run
| Python | mit | mchelem/cref2,mchelem/cref2,mchelem/cref2 | import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
Save output for every run | import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
| <commit_before>import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
<commit_msg>Save output for every run<commit_after> | import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
| import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
Save output for every runimport os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
| <commit_before>import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
<commit_msg>Save output for every run<commit_after>import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
|
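The change captured in this record is small but easy to miss in the repeated copies above: each run now writes into its own numbered subdirectory (output_dir + str(run)) instead of reusing a shared output directory, so per-run predictions survive the sweep. One caveat: bare os.mkdir raises FileExistsError if the sweep is restarted. A minimal sketch of the same pattern with a rerun-safe variant; the helper name is illustrative, not from the cref codebase:

import os

def run_output_dir(base_dir, run):
    # e.g. 'predictions/evaluation/1zdd/0', '.../1', ...
    path = os.path.join(base_dir, str(run))
    os.makedirs(path, exist_ok=True)  # no FileExistsError on reruns
    return path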
76bc5171cbccf9ce171f8891f24b66daa91aef0d | glitter/pages/forms.py | glitter/pages/forms.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from .models import Page
from glitter.integration import glitter_app_pool
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_name, glitter_app in glitter_apps.items():
choices.append((app_name, glitter_app.name))
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from glitter.integration import glitter_app_pool
from .models import Page
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_system_name, glitter_app in glitter_apps.items():
choices.append((app_system_name, glitter_app.name))
# Sort by glitter_app name
choices = sorted(choices, key=lambda x: x[1])
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
| Sort the Glitter app choices for page admin | Sort the Glitter app choices for page admin
For #69
| Python | bsd-3-clause | developersociety/django-glitter,developersociety/django-glitter,blancltd/django-glitter,developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from .models import Page
from glitter.integration import glitter_app_pool
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_name, glitter_app in glitter_apps.items():
choices.append((app_name, glitter_app.name))
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
Sort the Glitter app choices for page admin
For #69 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from glitter.integration import glitter_app_pool
from .models import Page
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_system_name, glitter_app in glitter_apps.items():
choices.append((app_system_name, glitter_app.name))
# Sort by glitter_app name
choices = sorted(choices, key=lambda x: x[1])
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from .models import Page
from glitter.integration import glitter_app_pool
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_name, glitter_app in glitter_apps.items():
choices.append((app_name, glitter_app.name))
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
<commit_msg>Sort the Glitter app choices for page admin
For #69<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from glitter.integration import glitter_app_pool
from .models import Page
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_system_name, glitter_app in glitter_apps.items():
choices.append((app_system_name, glitter_app.name))
# Sort by glitter_app name
choices = sorted(choices, key=lambda x: x[1])
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from .models import Page
from glitter.integration import glitter_app_pool
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_name, glitter_app in glitter_apps.items():
choices.append((app_name, glitter_app.name))
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
Sort the Glitter app choices for page admin
For #69
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from glitter.integration import glitter_app_pool
from .models import Page
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_system_name, glitter_app in glitter_apps.items():
choices.append((app_system_name, glitter_app.name))
# Sort by glitter_app name
choices = sorted(choices, key=lambda x: x[1])
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from .models import Page
from glitter.integration import glitter_app_pool
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_name, glitter_app in glitter_apps.items():
choices.append((app_name, glitter_app.name))
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
<commit_msg>Sort the Glitter app choices for page admin
For #69<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from glitter.integration import glitter_app_pool
from .models import Page
class DuplicatePageForm(forms.ModelForm):
class Meta:
model = Page
fields = ['url', 'title', 'parent', 'login_required', 'show_in_navigation']
labels = {
'url': 'New URL',
'title': 'New title',
}
def __init__(self, *args, **kwargs):
if not getattr(settings, 'GLITTER_SHOW_LOGIN_REQUIRED', False):
if 'login_required' in self.Meta.fields:
self.Meta.fields.remove('login_required')
super(DuplicatePageForm, self).__init__(*args, **kwargs)
def get_glitter_app_choices():
glitter_apps = glitter_app_pool.get_glitter_apps()
choices = [('', '(none)')]
for app_system_name, glitter_app in glitter_apps.items():
choices.append((app_system_name, glitter_app.name))
# Sort by glitter_app name
choices = sorted(choices, key=lambda x: x[1])
return choices
class PageAdminForm(forms.ModelForm):
class Meta:
model = Page
widgets = {
'glitter_app_name': forms.widgets.Select(choices=get_glitter_app_choices()),
}
fields = '__all__'
|
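The one-line fix in this record sorts Django-style (value, label) choice pairs by their human-readable label, which is what the admin dropdown displays. A minimal sketch of the same key=lambda x: x[1] idiom; note the '(none)' placeholder happens to stay first because '(' sorts before ASCII letters:

choices = [('', '(none)'), ('blog', 'Weblog'), ('events', 'Calendar')]
choices = sorted(choices, key=lambda x: x[1])
print(choices)  # [('', '(none)'), ('events', 'Calendar'), ('blog', 'Weblog')]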
09462f834d2c61b106cfa44eb45360c10db47f35 | rtwilio/__init__.py | rtwilio/__init__.py | "Twilio backend for the RapidSMS project."
__version__ = '0.3.0'
| "Twilio backend for the RapidSMS project."
__version__ = '1.0.0dev'
| Develop is now v1.0 dev. | Develop is now v1.0 dev.
| Python | bsd-3-clause | caktus/rapidsms-twilio | "Twilio backend for the RapidSMS project."
__version__ = '0.3.0'
Develop is now v1.0 dev. | "Twilio backend for the RapidSMS project."
__version__ = '1.0.0dev'
| <commit_before>"Twilio backend for the RapidSMS project."
__version__ = '0.3.0'
<commit_msg>Develop is now v1.0 dev.<commit_after> | "Twilio backend for the RapidSMS project."
__version__ = '1.0.0dev'
| "Twilio backend for the RapidSMS project."
__version__ = '0.3.0'
Develop is now v1.0 dev.
"Twilio backend for the RapidSMS project."
__version__ = '1.0.0dev'
| <commit_before>"Twilio backend for the RapidSMS project."
__version__ = '0.3.0'
<commit_msg>Develop is now v1.0 dev.<commit_after>"Twilio backend for the RapidSMS project."
__version__ = '1.0.0dev'
|
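A side note on the new version string: '1.0.0dev' is a pre-PEP 440 spelling that modern packaging tools normalize to '1.0.0.dev0'. A minimal check, assuming the third-party packaging distribution is installed:

from packaging.version import Version

v = Version('1.0.0dev')
print(v)                     # 1.0.0.dev0 (normalized form)
print(v.is_prerelease)       # True
print(v < Version('1.0.0'))  # True: dev releases sort before the final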
bda36d78984ee8b4701315170f004ed6955072ac | common/widgets.py | common/widgets.py | # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
| # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
| Handle "no file uploaded" situation in FileFieldLink | Handle "no file uploaded" situation in FileFieldLink
Fixes ValueErrors when user has no identity card uploaded
| Python | agpl-3.0 | m4tx/egielda,m4tx/egielda,m4tx/egielda | # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
Handle "no file uploaded" situation in FileFieldLink
Fixes ValueErrors when user has no identity card uploaded | # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
| <commit_before># This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
<commit_msg>Handle "no file uploaded" situation in FileFieldLink
Fixes ValueErrors when user has no identity card uploaded<commit_after> | # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
| # This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
Handle "no file uploaded" situation in FileFieldLink
Fixes ValueErrors when user has no identity card uploaded
# This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
| <commit_before># This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
return format_html('<a{}><p{}>{}</p></a>',
flatatt({'href': value.url}),
flatatt({'class': 'form-control-static'}),
value.name)
<commit_msg>Handle "no file uploaded" situation in FileFieldLink
Fixes ValueErrors when user has no identity card uploaded<commit_after># This file is part of e-Giełda.
# Copyright (C) 2014 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from django.forms import TextInput, Widget
from django.forms.utils import flatatt
from django.utils.html import format_html
from django.utils.translation import ugettext as _
class PhoneNumberInput(TextInput):
input_type = 'tel'
class FileFieldLink(Widget):
"""
Widget that displays file from FileField as a link to the uploaded data.
"""
def render(self, name, value, attrs=None):
outer_attrs = {}
if attrs:
outer_attrs.update(attrs)
if value:
outer_attrs['href'] = value.url
return format_html('<a{}><p{}>{}</p></a>',
flatatt(outer_attrs),
flatatt({'class': 'form-control-static'}),
value.name)
else:
outer_attrs['class'] = 'form-control-static'
return format_html('<p{}>{}</p>', flatatt(outer_attrs), _("No file uploaded"))
|
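The essence of this fix is a truthiness guard: FileFieldLink previously dereferenced value.url unconditionally, which blew up when the FileField was empty. A dependency-free sketch of the same branch logic; FakeFieldFile is a hypothetical stand-in for Django's FieldFile, used only for illustration:

class FakeFieldFile(object):
    def __init__(self, name, url):
        self.name, self.url = name, url

def describe(value):
    if value:  # empty field values are falsy
        return '<a href="%s"><p>%s</p></a>' % (value.url, value.name)
    return '<p>No file uploaded</p>'

print(describe(FakeFieldFile('id.jpg', '/media/id.jpg')))
print(describe(None))  # the branch that used to raise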
41b8cefb881e294b3bcdbb497d21fe1153a25725 | capomastro/urls.py | capomastro/urls.py | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'ADDITIONAL_URLS'):
urlpatterns += settings.ADDITIONAL_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
| from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'AUTHENTICATION_URLS'):
urlpatterns += settings.AUTHENTICATION_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
| Change from ADDITIONAL_URLS to AUTHENTICATION_URLS | Change from ADDITIONAL_URLS to AUTHENTICATION_URLS
| Python | mit | caio1982/capomastro,caio1982/capomastro,timrchavez/capomastro,timrchavez/capomastro,caio1982/capomastro | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'ADDITIONAL_URLS'):
urlpatterns += settings.ADDITIONAL_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
Change from ADDITIONAL_URLS to AUTHENTICATION_URLS | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'AUTHENTICATION_URLS'):
urlpatterns += settings.AUTHENTICATION_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
| <commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'ADDITIONAL_URLS'):
urlpatterns += settings.ADDITIONAL_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
<commit_msg>Change from ADDITIONAL_URLS to AUTHENTICATION_URLS<commit_after> | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'AUTHENTICATION_URLS'):
urlpatterns += settings.AUTHENTICATION_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
| from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'ADDITIONAL_URLS'):
urlpatterns += settings.ADDITIONAL_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
Change from ADDITIONAL_URLS to AUTHENTICATION_URLS
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'AUTHENTICATION_URLS'):
urlpatterns += settings.AUTHENTICATION_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
| <commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'ADDITIONAL_URLS'):
urlpatterns += settings.ADDITIONAL_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
<commit_msg>Change from ADDITIONAL_URLS to AUTHENTICATION_URLS<commit_after>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from capomastro.views import HomeView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'capomastro.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', HomeView.as_view(), name='home'),
url(r'^', include('projects.urls')),
url(r'^jenkins/', include('jenkins.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
if hasattr(settings, 'AUTHENTICATION_URLS'):
urlpatterns += settings.AUTHENTICATION_URLS
else:
urlpatterns += url(r'^accounts/', include('django.contrib.auth.urls')),
|
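The rename in this record is a pure settings-hook change: deployments that define AUTHENTICATION_URLS get their own URLs appended, and everyone else falls back to django.contrib.auth.urls. The hasattr-or-default dance can be sketched without Django at all; Settings here is a stand-in object, and the same check is often spelled with getattr and a default:

class Settings(object):
    AUTHENTICATION_URLS = ['custom-auth-urls']

def build_urlpatterns(settings):
    urls = ['core-urls']
    urls += getattr(settings, 'AUTHENTICATION_URLS', ['default-auth-urls'])
    return urls

print(build_urlpatterns(Settings()))  # ['core-urls', 'custom-auth-urls']
print(build_urlpatterns(object()))    # ['core-urls', 'default-auth-urls']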
49f506dce441b3a8fb1e2eb0f06c26661721785e | {{cookiecutter.app_name}}/models.py | {{cookiecutter.app_name}}/models.py | from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
| from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
def __str__(self):
return self.name
| Use friendly name for admin | Use friendly name for admin
| Python | mit | rickydunlop/cookiecutter-django-app-template-drf-haystack | from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
Use friendly name for admin | from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
def __str__(self):
return self.name
| <commit_before>from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
<commit_msg>Use friendly name for admin<commit_after> | from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
def __str__(self):
return self.name
| from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
Use friendly name for admin
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
def __str__(self):
return self.name
| <commit_before>from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
<commit_msg>Use friendly name for admin<commit_after>from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django_extensions.db.models import TimeStampedModel
class {{ cookiecutter.model_name }}(TimeStampedModel):
name = models.CharField(
verbose_name=_('name'),
max_length=255,
blank=True,
null=True,
)
def __str__(self):
return self.name
|
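Defining __str__ is what makes Django's admin (and any plain str() call) show the object's name instead of an opaque default repr. One caveat worth knowing for this Python 2/3-straddling template: with unicode_literals in effect, __str__ returns unicode on Python 2, where helpers such as Django's python_2_unicode_compatible decorator are the usual safeguard for non-ASCII names. A dependency-free sketch of the effect itself:

class Thing(object):  # stand-in for the templated model class
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return self.name

t = Thing('My thing')
print(str(t))   # My thing
print(repr(t))  # <__main__.Thing object at 0x...> without a __repr__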
b0d24c3aa1bea35afb81ee01fd238c8a263527c9 | scripts/cts-load.py | scripts/cts-load.py | from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start), loc=loc))
return Row(id=(splitext(basename(f[0])))[0], text=text, locs=locs)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)) \
.toDF() \
.write.save(sys.argv[2])
spark.stop()
| from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
id = (splitext(basename(f[0])))[0]
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
parts = loc.split(':')
if len(parts) >= 4: id = ':'.join(parts[0:4])
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start),
loc=sub('([^\.]+\.[^\.]+)\.[^\.]+(:[^:]+)$', '\\1\\2', loc)))
return Row(id=id, locs=locs,
series=sub('([^\.]+\.[^\.]+)\.[^\.]+$', '\\1', id),
text=text)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
# fields get !@#$% alphabetized!
spark.createDataFrame(spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)),
'id: string, locs: array<struct<length: int, loc: string, start: int>>, series: string, text: string') \
.write.save(sys.argv[2])
spark.stop()
| Add series and normalize locs. | Add series and normalize locs.
| Python | apache-2.0 | ViralTexts/vt-passim,ViralTexts/vt-passim,ViralTexts/vt-passim | from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start), loc=loc))
return Row(id=(splitext(basename(f[0])))[0], text=text, locs=locs)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)) \
.toDF() \
.write.save(sys.argv[2])
spark.stop()
Add series and normalize locs. | from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
id = (splitext(basename(f[0])))[0]
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
parts = loc.split(':')
if len(parts) >= 4: id = ':'.join(parts[0:4])
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start),
loc=sub('([^\.]+\.[^\.]+)\.[^\.]+(:[^:]+)$', '\\1\\2', loc)))
return Row(id=id, locs=locs,
series=sub('([^\.]+\.[^\.]+)\.[^\.]+$', '\\1', id),
text=text)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
# fields get !@#$% alphabetized!
spark.createDataFrame(spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)),
'id: string, locs: array<struct<length: int, loc: string, start: int>>, series: string, text: string') \
.write.save(sys.argv[2])
spark.stop()
| <commit_before>from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start), loc=loc))
return Row(id=(splitext(basename(f[0])))[0], text=text, locs=locs)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)) \
.toDF() \
.write.save(sys.argv[2])
spark.stop()
<commit_msg>Add series and normalize locs.<commit_after> | from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
id = (splitext(basename(f[0])))[0]
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
parts = loc.split(':')
if len(parts) >= 4: id = ':'.join(parts[0:4])
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start),
loc=sub('([^\.]+\.[^\.]+)\.[^\.]+(:[^:]+)$', '\\1\\2', loc)))
return Row(id=id, locs=locs,
series=sub('([^\.]+\.[^\.]+)\.[^\.]+$', '\\1', id),
text=text)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
# fields get !@#$% alphabetized!
spark.createDataFrame(spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)),
'id: string, locs: array<struct<length: int, loc: string, start: int>>, series: string, text: string') \
.write.save(sys.argv[2])
spark.stop()
| from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start), loc=loc))
return Row(id=(splitext(basename(f[0])))[0], text=text, locs=locs)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)) \
.toDF() \
.write.save(sys.argv[2])
spark.stop()
Add series and normalize locs.
from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
id = (splitext(basename(f[0])))[0]
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
parts = loc.split(':')
if len(parts) >= 4: id = ':'.join(parts[0:4])
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start),
loc=sub('([^\.]+\.[^\.]+)\.[^\.]+(:[^:]+)$', '\\1\\2', loc)))
return Row(id=id, locs=locs,
series=sub('([^\.]+\.[^\.]+)\.[^\.]+$', '\\1', id),
text=text)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
# fields get !@#$% alphabetized!
spark.createDataFrame(spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)),
'id: string, locs: array<struct<length: int, loc: string, start: int>>, series: string, text: string') \
.write.save(sys.argv[2])
spark.stop()
| <commit_before>from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start), loc=loc))
return Row(id=(splitext(basename(f[0])))[0], text=text, locs=locs)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)) \
.toDF() \
.write.save(sys.argv[2])
spark.stop()
<commit_msg>Add series and normalize locs.<commit_after>from __future__ import print_function
from re import sub
import sys
from os.path import basename, splitext
from pyspark.sql import SparkSession, Row
def parseCTS(f):
res = dict()
text = ''
locs = []
id = (splitext(basename(f[0])))[0]
for line in f[1].split('\n'):
if line != '':
(loc, raw) = line.split('\t', 2)
parts = loc.split(':')
if len(parts) >= 4: id = ':'.join(parts[0:4])
start = len(text)
text += sub('\s+', ' ', raw)
text += '\n'
locs.append(Row(start=start, length=(len(text) - start),
loc=sub('([^\.]+\.[^\.]+)\.[^\.]+(:[^:]+)$', '\\1\\2', loc)))
return Row(id=id, locs=locs,
series=sub('([^\.]+\.[^\.]+)\.[^\.]+$', '\\1', id),
text=text)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: cts-load.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Load CTS TSV').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
# fields get !@#$% alphabetized!
spark.createDataFrame(spark.sparkContext.wholeTextFiles(sys.argv[1]) \
.filter(lambda f: f[0].endswith('.cts')) \
.map(lambda f: parseCTS(f)),
'id: string, locs: array<struct<length: int, loc: string, start: int>>, series: string, text: string') \
.write.save(sys.argv[2])
spark.stop()
|
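The in-code comment about fields getting alphabetized is the key to this record: on the PySpark versions of that era, Row(**kwargs) sorted its fields alphabetically, so the commit pins column order by handing createDataFrame an explicit DDL schema string whose columns are listed in that same alphabetical order. A minimal sketch, assuming a local PySpark installation (newer PySpark preserves keyword order, so this is era-specific behavior):

from pyspark.sql import SparkSession, Row

spark = SparkSession.builder.appName('schema-order-demo').getOrCreate()
data = [Row(id='ab.cd.ef:1', locs=[], series='ab.cd', text='hello\n')]
schema = 'id: string, locs: array<string>, series: string, text: string'
spark.createDataFrame(data, schema).printSchema()
spark.stop()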
370bc073d56615a5aaa3668ab89d96cdd49ef17d | compare.py | compare.py | """The compare module contains the components you need to
compare values and ensure that your expectations are met.
To make use of this module, you simply import the "expect"
starter into your spec/test file, and specify the expectation
you have about two values.
"""
class Expr(object):
"""Encapsulates a python expression, primitive value or callable
that is to be evaluated and compared to another value.
Serves as the basic construct for describing an expectation.
Generally you would not use this class directly, instead it is
available through the "expect" alias which allows for a more
pythonic syntax.
It initializes with primitives, native types and expressions
>>> e = Expr("Foo")
>>> e.value == "Foo"
True
>>> e = Expr(['a', 'b'])
>>> e.value == ['a', 'b']
True
>>> Expr(4 + 7).value == 11
True
>>> Expr(4 == 7).value == False
True
"""
def __init__(self, value):
self.value = value
| Implement Expr class -- the base of it all. | Implement Expr class -- the base of it all.
| Python | bsd-3-clause | rudylattae/compare,rudylattae/compare | Implement Expr class -- the base of it all. | """The compare module contains the components you need to
compare values and ensure that your expectations are met.
To make use of this module, you simply import the "expect"
starter into your spec/test file, and specify the expectation
you have about two values.
"""
class Expr(object):
"""Encapsulates a python expression, primitive value or callable
that is to be evaluated and compared to another value.
Serves as the basic construct for describing an expectation.
Generally you would not use this class directly, instead it is
available through the "expect" alias which allows for a more
pythonic syntax.
It initializes with primitives, native types and expressions
>>> e = Expr("Foo")
>>> e.value == "Foo"
True
>>> e = Expr(['a', 'b'])
>>> e.value == ['a', 'b']
True
>>> Expr(4 + 7).value == 11
True
>>> Expr(4 == 7).value == False
True
"""
def __init__(self, value):
self.value = value
| <commit_before><commit_msg>Implement Expr class -- the base of it all.<commit_after> | """The compare module contains the components you need to
compare values and ensure that your expectations are met.
To make use of this module, you simply import the "expect"
starter into your spec/test file, and specify the expectation
you have about two values.
"""
class Expr(object):
"""Encapsulates a python expression, primitive value or callable
that is to be evaluated and compared to another value.
Serves as the basic construct for describing an expectation.
Generally you would not use this class directly, instead it is
available through the "expect" alias which allows for a more
pythonic syntax.
It initializes with primitives, native types and expressions
>>> e = Expr("Foo")
>>> e.value == "Foo"
True
>>> e = Expr(['a', 'b'])
>>> e.value == ['a', 'b']
True
>>> Expr(4 + 7).value == 11
True
>>> Expr(4 == 7).value == False
True
"""
def __init__(self, value):
self.value = value
| Implement Expr class -- the base of it all."""The compare module contains the components you need to
compare values and ensure that your expectations are met.
To make use of this module, you simply import the "expect"
starter into your spec/test file, and specify the expectation
you have about two values.
"""
class Expr(object):
"""Encapsulates a python expression, primitive value or callable
that is to be evaluated and compared to another value.
Serves as the basic construct for describing an expectation.
Generally you would not use this class directly, instead it is
available through the "expect" alias which allows for a more
pythonic syntax.
It initializes with primitives, native types and expressions
>>> e = Expr("Foo")
>>> e.value == "Foo"
True
>>> e = Expr(['a', 'b'])
>>> e.value == ['a', 'b']
True
>>> Expr(4 + 7).value == 11
True
>>> Expr(4 == 7).value == False
True
"""
def __init__(self, value):
self.value = value
| <commit_before><commit_msg>Implement Expr class -- the base of it all.<commit_after>"""The compare module contains the components you need to
compare values and ensure that your expectations are met.
To make use of this module, you simply import the "expect"
starter into your spec/test file, and specify the expectation
you have about two values.
"""
class Expr(object):
"""Encapsulates a python expression, primitive value or callable
that is to be evaluated and compared to another value.
Serves as the basic construct for describing an expectation.
Generally you would not use this class directly, instead it is
available through the "expect" alias which allows for a more
pythonic syntax.
It initializes with primitives, native types and expressions
>>> e = Expr("Foo")
>>> e.value == "Foo"
True
>>> e = Expr(['a', 'b'])
>>> e.value == ['a', 'b']
True
>>> Expr(4 + 7).value == 11
True
>>> Expr(4 == 7).value == False
True
"""
def __init__(self, value):
self.value = value
| |
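The docstring examples in this new module are doctests, so the expectations documented there are directly executable. A minimal way to run them, assuming the file is saved as compare.py on the import path:

    python -m doctest compare.py -v

or, programmatically:

import doctest
import compare  # assumption: compare.py is importable

doctest.testmod(compare, verbose=True)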
c4fa912acc573f5590510c0345d9a9b3bc40f4c8 | espresso/repl.py | espresso/repl.py | # -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
"""
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
| # -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self, banner = None):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
""" or banner
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
| Make EspressoConsole.interact conform to InteractiveConsole.interact | Make EspressoConsole.interact conform to InteractiveConsole.interact
| Python | bsd-3-clause | ratchetrobotics/espresso | # -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
"""
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
Make EspressoConsole.interact conform to InteractiveConsole.interact | # -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self, banner = None):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
""" or banner
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
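One wrinkle in the new interact() worth flagging: banner = """...""" or banner groups as (string literal) or banner, and the literal is always truthy, so a caller-supplied banner is silently ignored and the signature change is cosmetic. A minimal sketch of the fallback the commit's wording suggests was intended; names here are illustrative:

DEFAULT_BANNER = 'espresso'  # stand-in for the ASCII-art banner

def pick_banner(banner=None):
    return banner or DEFAULT_BANNER  # fall back only when none is given

print(pick_banner())          # espresso
print(pick_banner('custom'))  # custom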
| <commit_before># -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
"""
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
<commit_msg>Make EspressoConsole.interact conform to InteractiveConsole.interact<commit_after> | # -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self, banner = None):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
""" or banner
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
| # -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
"""
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
Make EspressoConsole.interact conform to InteractiveConsole.interact# -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self, banner = None):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
""" or banner
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
| <commit_before># -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
"""
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
<commit_msg>Make EspressoConsole.interact conform to InteractiveConsole.interact<commit_after># -*- coding: utf-8 -*-
from code import InteractiveConsole
class EspressoConsole(InteractiveConsole, object):
def interact(self, banner = None):
banner = """███████╗███████╗██████╗ ██████╗ ███████╗███████╗███████╗ ██████╗
██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝██╔════╝██╔════╝██╔═══██╗
█████╗ ███████╗██████╔╝██████╔╝█████╗ ███████╗███████╗██║ ██║
██╔══╝ ╚════██║██╔═══╝ ██╔══██╗██╔══╝ ╚════██║╚════██║██║ ██║
███████╗███████║██║ ██║ ██║███████╗███████║███████║╚██████╔╝
╚══════╝╚══════╝╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝ ╚═════╝
""" or banner
super(EspressoConsole, self).interact(banner)
def raw_input(self, prompt=''):
prompt = '[=>]'
return super(EspressoConsole, self).raw_input(prompt)
|
bd181f778e74bbd070fd4f46329ad5c8dc637ea7 | zendesk_tickets_machine/tickets/services.py | zendesk_tickets_machine/tickets/services.py | import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
| import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
| Adjust code style to reduce lines of code :bear: | Adjust code style to reduce lines of code :bear:
| Python | mit | prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine | import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
Adjust code style to reduce lines of code :bear: | import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
| <commit_before>import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
<commit_msg>Adjust code style to reduce lines of code :bear:<commit_after> | import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
| import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
Adjust code style to reduce lines of code :bear:import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
| <commit_before>import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(
pk__in=id_list
).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(
pk__in=id_list
).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(
pk__in=id_list
).update(requester=edit_requester)
if edit_due_at:
Ticket.objects.filter(
pk__in=id_list
).update(
due_at=datetime.datetime.strptime(
edit_due_at, "%m/%d/%Y"
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(
pk__in=id_list
).update(assignee=edit_assignee)
<commit_msg>Adjust code style to reduce lines of code :bear:<commit_after>import datetime
from django.utils.timezone import utc
from .models import Ticket
class TicketServices():
def edit_ticket_once(self, **kwargs):
id_list = kwargs.get('id_list')
edit_tags = kwargs.get('edit_tags')
edit_requester = kwargs.get('edit_requester')
edit_subject = kwargs.get('edit_subject')
edit_due_at = kwargs.get('edit_due_at')
edit_assignee = kwargs.get('edit_assignee')
if edit_tags:
Ticket.objects.filter(pk__in=id_list).update(tags=edit_tags)
if edit_subject:
Ticket.objects.filter(pk__in=id_list).update(subject=edit_subject)
if edit_requester:
Ticket.objects.filter(pk__in=id_list).update(
requester=edit_requester
)
if edit_due_at:
Ticket.objects.filter(pk__in=id_list).update(
due_at=datetime.datetime.strptime(
edit_due_at, '%m/%d/%Y'
).replace(tzinfo=utc)
)
if edit_assignee:
Ticket.objects.filter(pk__in=id_list).update(
assignee=edit_assignee
)
|
69df0f5148b998cc7757405b9965200276ce55b9 | fireplace/cards/league/adventure.py | fireplace/cards/league/adventure.py | from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
| from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
| Implement Temple Escape event choices | Implement Temple Escape event choices
| Python | agpl-3.0 | beheh/fireplace,NightKev/fireplace,jleclanche/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,Ragowit/fireplace,Ragowit/fireplace | from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
Implement Temple Escape event choices | from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
| <commit_before>from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
<commit_msg>Implement Temple Escape event choices<commit_after> | from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
| from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
Implement Temple Escape event choicesfrom ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
| <commit_before>from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
<commit_msg>Implement Temple Escape event choices<commit_after>from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
|
5c681567c359c76e9e323a82ab9162f5098b6421 | measurator/main.py | measurator/main.py | def run_main():
pass
| import argparse
def run_main():
path = file_path()
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
| Add mandatory argument: path to file | Add mandatory argument: path to file
| Python | mit | ahitrin-attic/measurator-proto | def run_main():
pass
Add mandatory argument: path to file | import argparse
def run_main():
path = file_path()
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
| <commit_before>def run_main():
pass
<commit_msg>Add mandatory argument: path to file<commit_after> | import argparse
def run_main():
path = file_path()
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
| def run_main():
pass
Add mandatory argument: path to fileimport argparse
def run_main():
path = file_path()
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
| <commit_before>def run_main():
pass
<commit_msg>Add mandatory argument: path to file<commit_after>import argparse
def run_main():
path = file_path()
def file_path():
parser = argparse.ArgumentParser()
parser.add_argument("path")
args = parser.parse_args()
return args.path
|
509669de3b61f7f67c5c3603f696b06ad759a7b3 | mopidy/internal/gi.py | mopidy/internal/gi.py | import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
| import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see https://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
| Use https for docs URL | Use https for docs URL
| Python | apache-2.0 | adamcik/mopidy,mopidy/mopidy,jodal/mopidy,jcass77/mopidy,mopidy/mopidy,jodal/mopidy,mopidy/mopidy,kingosticks/mopidy,adamcik/mopidy,kingosticks/mopidy,kingosticks/mopidy,jodal/mopidy,adamcik/mopidy,jcass77/mopidy,jcass77/mopidy | import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
Use https for docs URL | import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see https://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
| <commit_before>import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
<commit_msg>Use https for docs URL<commit_after> | import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see https://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
| import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
Use https for docs URLimport sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see https://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
| <commit_before>import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
<commit_msg>Use https for docs URL<commit_after>import sys
import textwrap
try:
import gi
gi.require_version("Gst", "1.0")
from gi.repository import GLib, GObject, Gst
except ImportError:
print(
textwrap.dedent(
"""
ERROR: A GObject based library was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see https://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""
)
)
raise
else:
Gst.init([])
gi.require_version("GstPbutils", "1.0")
from gi.repository import GstPbutils
GLib.set_prgname("mopidy")
GLib.set_application_name("Mopidy")
REQUIRED_GST_VERSION = (1, 14, 0)
REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION))
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
f"ERROR: Mopidy requires GStreamer >= {REQUIRED_GST_VERSION_DISPLAY}, "
f"but found {Gst.version_string()}."
)
__all__ = [
"GLib",
"GObject",
"Gst",
"GstPbutils",
"gi",
]
|
418357ead146a98f2318af6c76323e2705b79cec | cvloop/__init__.py | cvloop/__init__.py | """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
If you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
If you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| Revert unnecessary change to original | Revert unnecessary change to original
| Python | mit | shoeffner/cvloop | """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
If you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
Revert unnecessary change to original | """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| <commit_before>"""Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
<commit_msg>Revert unnecessary change to original<commit_after> | """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
Revert unnecessary change to original"""Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| <commit_before>"""Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
<commit_msg>Revert unnecessary change to original<commit_after>"""Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
|
c8152d1ce0c9f83460da3d384a532d6d064d6543 | cross_site_urls/urlresolvers.py | cross_site_urls/urlresolvers.py | # -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language()
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
| # -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None, language=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language() if language is None else language
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
| Add a new settings allowing to set manually the language code of the url resolve when calling the resolver | FEAT(Resolvers): Add a new settings allowing to set manually the language code of the url resolve when calling the resolver
| Python | bsd-3-clause | kapt-labs/django-cross-site-urls,kapt-labs/django-cross-site-urls | # -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language()
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
FEAT(Resolvers): Add a new settings allowing to set manually the language code of the url resolve when calling the resolver | # -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None, language=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language() if language is None else language
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
| <commit_before># -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language()
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
<commit_msg>FEAT(Resolvers): Add a new settings allowing to set manually the language code of the url resolve when calling the resolver<commit_after> | # -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None, language=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language() if language is None else language
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
| # -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language()
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
FEAT(Resolvers): Add a new settings allowing to set manually the language code of the url resolve when calling the resolver# -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None, language=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language() if language is None else language
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
| <commit_before># -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language()
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
<commit_msg>FEAT(Resolvers): Add a new settings allowing to set manually the language code of the url resolve when calling the resolver<commit_after># -*- coding:utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
import uuid
import requests
from django.core.exceptions import ImproperlyConfigured
from django.utils import translation
import slumber
from .conf import settings as local_settings
from .encoding import prefix_kwargs
from .utils import get_api_url
from .constants import RESOLVE_API_VIEW_URL
def resolve_url(site_id, view_name, args=None, kwargs=None, language=None):
if site_id not in local_settings.SITES:
raise ImproperlyConfigured("[Cross site] Configuration error: The given site identifier is not configured in the settings")
site_conf = local_settings.SITES[site_id]
language = translation.get_language() if language is None else language
resolve_args = {'view_name': view_name,
'args': args,
'language': language}
if kwargs:
kwargs_prefix = uuid.uuid4()
resolve_args["kwargs_prefix"] = kwargs_prefix
prefixed_kwargs = prefix_kwargs(kwargs_prefix, kwargs)
resolve_args.update(prefixed_kwargs)
api_url = get_api_url(site_conf["scheme"],
site_conf["domain"])
session = requests.Session()
session.verify = local_settings.VERIFY_SSL_CERT
api = slumber.API(api_url, session=session, auth=local_settings.API_AUTH_CREDENTIALS)
resolver = RESOLVE_API_VIEW_URL.replace('/', '')
url = getattr(api, resolver).get(**resolve_args)['url']
return url
|
0e4db0303d4a8212a91082ace75df95fd440bbfa | server/app.py | server/app.py | from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return 'Image Saved'
return 'Dafuq? No File!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337) | from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
imgData = request.form['file']
if imgData:
filename = secure_filename("pic.jpg")
with open(os.path.join(app.config['UPLOAD_FOLDER'], filename), "wb") as fh:
fh.write(imgData.decode('base64'))
return 'Image Saved'
return 'Dafuq? No Image Data Sent!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337) | Save data sent from phone | Save data sent from phone
| Python | mit | navinpai/LMTAS,navinpai/LMTAS,navinpai/LMTAS | from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return 'Image Saved'
return 'Dafuq? No File!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337)Save data sent from phone | from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
imgData = request.form['file']
if imgData:
filename = secure_filename("pic.jpg")
with open(os.path.join(app.config['UPLOAD_FOLDER'], filename), "wb") as fh:
fh.write(imgData.decode('base64'))
return 'Image Saved'
return 'Dafuq? No Image Data Sent!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337) | <commit_before>from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return 'Image Saved'
return 'Dafuq? No File!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337)<commit_msg>Save data sent from phone<commit_after> | from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
imgData = request.form['file']
if imgData:
filename = secure_filename("pic.jpg")
with open(os.path.join(app.config['UPLOAD_FOLDER'], filename), "wb") as fh:
fh.write(imgData.decode('base64'))
return 'Image Saved'
return 'Dafuq? No Image Data Sent!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337) | from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return 'Image Saved'
return 'Dafuq? No File!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337)Save data sent from phonefrom flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
imgData = request.form['file']
if imgData:
filename = secure_filename("pic.jpg")
with open(os.path.join(app.config['UPLOAD_FOLDER'], filename), "wb") as fh:
fh.write(imgData.decode('base64'))
return 'Image Saved'
return 'Dafuq? No Image Data Sent!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337) | <commit_before>from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
file = request.files['file']
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return 'Image Saved'
return 'Dafuq? No File!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337)<commit_msg>Save data sent from phone<commit_after>from flask import Flask, request
from werkzeug.utils import secure_filename
import os
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads/'
@app.route('/')
def hello_world():
return 'Team FifthEye!'
@app.route('/upload', methods=['POST'])
def upload():
imgData = request.form['file']
if imgData:
filename = secure_filename("pic.jpg")
with open(os.path.join(app.config['UPLOAD_FOLDER'], filename), "wb") as fh:
fh.write(imgData.decode('base64'))
return 'Image Saved'
return 'Dafuq? No Image Data Sent!'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=1337) |
3629e58c47941965406372cb2d3b52a3fdbadfc2 | ckanext/tayside/logic/action/get.py | ckanext/tayside/logic/action/get.py | from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
| from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
| Handle logic for extras for dataset | Handle logic for extras for dataset
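The guard added in this commit covers datasets serialized without an extras list. Below, the merging rule extracted into a stand-alone sketch with the CKAN plumbing removed; the dataset dicts are illustrative. One observation: in the committed else-branch the append stays commented out, so a dataset with no extras ends up with an empty list and no theme entry — the sketch mirrors that behavior rather than guessing at intent.
# Stand-alone sketch of the extras handling; dicts are illustrative.
def merge_theme_extras(result, themes):
    extras = result.get('extras')
    if extras:
        for extra in extras:
            if extra.get('key') == 'theme':
                extra['value'] = themes  # refresh an existing theme entry
                return result
        extras.append({'key': 'theme', 'value': themes})
    else:
        result['extras'] = []  # mirrors result.update({'extras': []}); theme is dropped
    return result
print(merge_theme_extras({'extras': [{'key': 'theme', 'value': []}]}, ['health']))
print(merge_theme_extras({}, ['energy']))  # -> {'extras': []}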
| Python | agpl-3.0 | ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside | from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
Handle logic for extras for dataset | from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
| <commit_before>from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
<commit_msg>Handle logic for extras for dataset<commit_after> | from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
| from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
Handle logic for extras for datasetfrom ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
| <commit_before>from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
<commit_msg>Handle logic for extras for dataset<commit_after>from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overridden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
|
e22bf1a54d8b532f0a417221b04e382e71b29186 | LiSE/LiSE/tests/test_examples.py | LiSE/LiSE/tests/test_examples.py | from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
| from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
| Add a test to catch that load error next time | Add a test to catch that load error next time
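The new test_char_stat_startup above closes and reopens an Engine on the same directory to prove character stats survive a reload. It takes a tempdir argument, presumably a pytest fixture; a minimal conftest.py sketch that would satisfy it follows — LiSE's actual fixture may differ.
# Assumed fixture sketch; LiSE's real conftest may differ.
import shutil
import tempfile
import pytest
@pytest.fixture
def tempdir():
    path = tempfile.mkdtemp(prefix='lise-test-')
    yield path  # the test opens Engine(path), closes it, then reopens it
    shutil.rmtree(path, ignore_errors=True)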
| Python | agpl-3.0 | LogicalDash/LiSE,LogicalDash/LiSE | from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
Add a test to catch that load error next time | from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
| <commit_before>from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
<commit_msg>Add a test to catch that load error next time<commit_after> | from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
| from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
Add a test to catch that load error next timefrom LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
| <commit_before>from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
<commit_msg>Add a test to catch that load error next time<commit_after>from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
11cbeb3d0140e79fc0bedf5039a3c70f626062eb | condor/python/resync_dashboards.py | condor/python/resync_dashboards.py | #!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(results)
| #!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(str(results))
| Convert results to a string before printing | Convert results to a string before printing
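Context for this one-line fix: sys.stdout.write accepts only strings, while elasticsearch.helpers.reindex returns a non-string summary (a (moved, errors) tuple in elasticsearch-py releases of this era), so the pre-fix call raised TypeError. A small sketch, with the return value mocked:
# The reindex return value here is illustrative.
import sys
results = (12345, [])
try:
    sys.stdout.write(results)  # the pre-fix call
except TypeError as err:
    sys.stdout.write('TypeError: %s\n' % err)
sys.stdout.write(str(results) + '\n')  # the committed fix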
| Python | apache-2.0 | DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs | #!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(results)
Convert results to a string before printing | #!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(str(results))
| <commit_before>#!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(results)
<commit_msg>Convert results to a string before printing<commit_after> | #!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(str(results))
| #!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(results)
Convert results to a string before printing#!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(str(results))
| <commit_before>#!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(results)
<commit_msg>Convert results to a string before printing<commit_after>#!/usr/bin/env python
import argparse
import sys
import logging
import elasticsearch
import elasticsearch.helpers
ES_NODES = 'uct2-es-door.mwt2.org'
VERSION = '0.1'
SOURCE_INDEX = '.kibana'
TARGET_INDEX = 'osg-connect-kibana'
def get_es_client():
""" Instantiate DB client and pass connection back """
return elasticsearch.Elasticsearch(hosts=ES_NODES,
retry_on_timeout=True,
max_retries=10,
timeout=300)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Reindex events from ' +
'.kibana ' +
'to osg-connect-kibana')
args = parser.parse_args(sys.argv[1:])
client = get_es_client()
results = elasticsearch.helpers.reindex(client,
SOURCE_INDEX,
TARGET_INDEX,
scroll='30m')
sys.stdout.write(str(results))
|
73d0225b64ec82c7a8142dbac023be499b41fe0f | figures.py | figures.py | #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| Make the python script silent | Make the python script silent
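The diff above only deletes the two debug statements (print line and print f), which are also Python 2-only syntax; the LaTeX emission is untouched. For context, a sketch of the block the loop writes for one figure entry — the field names are the ones the script reads, the values are made up.
# Illustrative figure entry and the LaTeX block the loop emits for it.
f = {'id': 'fig:network', 'file': 'figures/network.pdf',
     'caption': 'Interaction network.', 'wide': True}
ftype, fwidth = ('figure*', '\\textwidth') if 'wide' in f else ('figure', '\\columnwidth')
print('\n\\begin{%s}[bt]\n\t\\centering\n'
      '\t\\includegraphics[width=%s]{%s}\n'
      '\t\\caption{%s}\n\t\\label{%s}\n\\end{%s}\n'
      % (ftype, fwidth, f['file'], f['caption'], f['id'], ftype))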
| Python | mit | PoisotLab/PLMT | #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
Make the python script silent | #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| <commit_before>#! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
<commit_msg>Make the python script silent<commit_after> | #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
Make the python script silent#! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| <commit_before>#! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
<commit_msg>Make the python script silent<commit_after>#! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
|
b597956cd427a3b830a498c69602753ce6117119 | chrome/test/chromeos/autotest/files/client/site_tests/desktopui_SyncIntegrationTests/desktopui_SyncIntegrationTests.py | chrome/test/chromeos/autotest/files/client/site_tests/desktopui_SyncIntegrationTests/desktopui_SyncIntegrationTests.py | # Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
| # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
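Moving the binary name and flags into class attributes makes the wrapper parametrizable: a hypothetical variant could override them without touching run_once. The subclass name and timeout below are invented for illustration.
# Hypothetical variant enabled by the new class attributes.
class desktopui_SyncIntegrationTestsLong(desktopui_SyncIntegrationTests):
    cmd_line_params = '--test-terminate-timeout=300000'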
| Make the sync integration tests self-contained on autotest | Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98
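A sketch of the dependency this message describes removing: the old wrapper needed a real gaia password file pre-staged on every device, while the stubbed sign-in leaves only the (shorter) timeout flag. Helper names and paths below are invented.
# Invented helpers contrasting the old and new parameter construction.
import os
def old_style_params(bindir):
    password_file = os.path.join(bindir, 'sync_password.txt')  # had to exist on-device
    return ('--password-file-for-test=%s '
            '--test-terminate-timeout=300000') % password_file
def new_style_params():
    return '--test-terminate-timeout=120000'  # gaia sign-in is stubbed locally now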
| Python | bsd-3-clause | dednal/chromium.src,Jonekee/chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,keishi/chromium,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,dushu1203/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,hujiajie/pa-chromium,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,hujiajie/pa-chromium,jaruba/chromium.src,ChromiumWebApps/chromium,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,keishi/chromium,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,dushu1203/chromium.src,dednal/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,littlstar/chromium.src,chuan9/chromium-crosswalk,patrickm/chromium.src,hujiajie/pa-chromium,rogerwang/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,keishi/chromium,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,krieger-od/nwjs_chromium.src,robclark/chromium,ChromiumWebApps/chromium,anirudhSK/chromium,Chilledheart/chromium,dushu1203/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,robclark/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,ltilve/chromium,M4sse/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,hujiajie/pa-chromium,ondra-novak/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,axinging/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,M4sse/chromium.src,rogerwang/chromium,robclark/chromium,anirudhSK/chromium,zcbenz/cefode-chromium,keishi/chromium,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,chuan9/chromium-crosswalk,Just-D/chromium-1,nacl-webkit/chrome_deps,M4sse/chromium.src,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,Jonekee/chromium.src,hg
l888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,littlstar/chromium.src,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,zcbenz/cefode-chromium,markYoungH/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,dednal/chromium.src,jaruba/chromium.src,timopulkkinen/BubbleFish,rogerwang/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,rogerwang/chromium,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,Just-D/chromium-1,littlstar/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,mogoweb/chromium-crosswalk,littlstar/chromium.src,keishi/chromium,Just-D/chromium-1,robclark/chromium,robclark/chromium,Chilledheart/chromium,littlstar/chromium.src,rogerwang/chromium,keishi/chromium,rogerwang/chromium,keishi/chromium,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,markYoungH/chromium.src,zcbenz/cefode-chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,keishi/chromium,timopulkkinen/BubbleFish,jaruba/chromium.src,ChromiumWebApps/chromium,markYoungH/chromium.src,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,Just-D/chromium-1,mogoweb/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,keishi/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,anirudhSK/chromium,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Just-D/chromium-1,patrickm/chromium.src,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,anirudhSK/chromium,jaruba/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,hgl888/ch
romium-crosswalk-efl,pozdnyakov/chromium-crosswalk,rogerwang/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,robclark/chromium,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,Chilledheart/chromium,ltilve/chromium,M4sse/chromium.src,rogerwang/chromium,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,rogerwang/chromium,dushu1203/chromium.src,M4sse/chromium.src,robclark/chromium,PeterWangIntel/chromium-crosswalk,robclark/chromium,dednal/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,axinging/chromium-crosswalk,Just-D/chromium-1,ondra-novak/chromium.src,robclark/chromium,nacl-webkit/chrome_deps,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,keishi/chromium,hujiajie/pa-chromium,patrickm/chromium.src,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk,robclark/chromium,Just-D/chromium-1,timopulkkinen/BubbleFish,Chilledheart/chromium,bright-sparks/chromium-spacewalk,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,timopulkkinen/BubbleFish,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,keishi/chromium,dushu1203/chromium.src,Chilledheart/chromium,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,Jonekee/chromium.src,littlstar/chromium.src | # Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
| <commit_before># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
<commit_msg>Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
| # Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
| <commit_before># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
<commit_msg>Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
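
The refactor in this record boils down to hoisting the hard-coded binary name and flags into class attributes, so a subclass can retarget the test without touching run_once. A minimal sketch of the pattern, with autotest's chrome_test.ChromeTestBase replaced by a hypothetical stub and the subclass name invented for illustration:

class ChromeTestBase(object):
    # Hypothetical stand-in for autotest_lib's chrome_test.ChromeTestBase.
    def run_chrome_test(self, binary, params):
        print('would run: %s %s' % (binary, params))

class desktopui_SyncIntegrationTests(ChromeTestBase):
    version = 1
    binary_to_run = 'sync_integration_tests'
    cmd_line_params = '--test-terminate-timeout=120000'

    def run_once(self):
        self.run_chrome_test(self.binary_to_run, self.cmd_line_params)

class desktopui_SyncIntegrationTestsSlow(desktopui_SyncIntegrationTests):
    # Overriding one attribute is now enough to change the timeout.
    cmd_line_params = '--test-terminate-timeout=300000'

desktopui_SyncIntegrationTestsSlow().run_once()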
|
5bcb267761e6c2694111757ee4fcf2a050f6c556 | byceps/blueprints/site/guest_server/forms.py | byceps/blueprints/site/guest_server/forms.py | """
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Optional
from ....util.l10n import LocalizedForm
class CreateForm(LocalizedForm):
hostname = StringField(lazy_gettext('Hostname'), validators=[Optional()])
notes = TextAreaField(lazy_gettext('Notes'), validators=[Optional()])
| """
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import re
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Length, Optional, Regexp
from ....util.l10n import LocalizedForm
HOSTNAME_REGEX = re.compile(r'^[a-z][a-z0-9-\.]+$')
class CreateForm(LocalizedForm):
hostname = StringField(
lazy_gettext('Hostname'),
validators=[Optional(), Length(max=32), Regexp(HOSTNAME_REGEX)],
)
notes = TextAreaField(
lazy_gettext('Notes'), validators=[Optional(), Length(max=1000)]
)
| Make guest server form validation more strict | Make guest server form validation more strict
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | """
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Optional
from ....util.l10n import LocalizedForm
class CreateForm(LocalizedForm):
hostname = StringField(lazy_gettext('Hostname'), validators=[Optional()])
notes = TextAreaField(lazy_gettext('Notes'), validators=[Optional()])
Make guest server form validation more strict | """
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import re
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Length, Optional, Regexp
from ....util.l10n import LocalizedForm
HOSTNAME_REGEX = re.compile(r'^[a-z][a-z0-9-\.]+$')
class CreateForm(LocalizedForm):
hostname = StringField(
lazy_gettext('Hostname'),
validators=[Optional(), Length(max=32), Regexp(HOSTNAME_REGEX)],
)
notes = TextAreaField(
lazy_gettext('Notes'), validators=[Optional(), Length(max=1000)]
)
| <commit_before>"""
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Optional
from ....util.l10n import LocalizedForm
class CreateForm(LocalizedForm):
hostname = StringField(lazy_gettext('Hostname'), validators=[Optional()])
notes = TextAreaField(lazy_gettext('Notes'), validators=[Optional()])
<commit_msg>Make guest server form validation more strict<commit_after> | """
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import re
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Length, Optional, Regexp
from ....util.l10n import LocalizedForm
HOSTNAME_REGEX = re.compile(r'^[a-z][a-z0-9-\.]+$')
class CreateForm(LocalizedForm):
hostname = StringField(
lazy_gettext('Hostname'),
validators=[Optional(), Length(max=32), Regexp(HOSTNAME_REGEX)],
)
notes = TextAreaField(
lazy_gettext('Notes'), validators=[Optional(), Length(max=1000)]
)
| """
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Optional
from ....util.l10n import LocalizedForm
class CreateForm(LocalizedForm):
hostname = StringField(lazy_gettext('Hostname'), validators=[Optional()])
notes = TextAreaField(lazy_gettext('Notes'), validators=[Optional()])
Make guest server form validation more strict"""
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import re
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Length, Optional, Regexp
from ....util.l10n import LocalizedForm
HOSTNAME_REGEX = re.compile(r'^[a-z][a-z0-9-\.]+$')
class CreateForm(LocalizedForm):
hostname = StringField(
lazy_gettext('Hostname'),
validators=[Optional(), Length(max=32), Regexp(HOSTNAME_REGEX)],
)
notes = TextAreaField(
lazy_gettext('Notes'), validators=[Optional(), Length(max=1000)]
)
| <commit_before>"""
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Optional
from ....util.l10n import LocalizedForm
class CreateForm(LocalizedForm):
hostname = StringField(lazy_gettext('Hostname'), validators=[Optional()])
notes = TextAreaField(lazy_gettext('Notes'), validators=[Optional()])
<commit_msg>Make guest server form validation more strict<commit_after>"""
byceps.blueprints.site.guest_server.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import re
from flask_babel import lazy_gettext
from wtforms import StringField, TextAreaField
from wtforms.validators import Length, Optional, Regexp
from ....util.l10n import LocalizedForm
HOSTNAME_REGEX = re.compile('^[a-z][a-z0-9-\.]+$')
class CreateForm(LocalizedForm):
hostname = StringField(
lazy_gettext('Hostname'),
validators=[Optional(), Length(max=32), Regexp(HOSTNAME_REGEX)],
)
notes = TextAreaField(
lazy_gettext('Notes'), validators=[Optional(), Length(max=1000)]
)
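
The tightened validation in this record rests on HOSTNAME_REGEX; a quick standalone check of what the pattern accepts and rejects, using nothing beyond the standard library:

import re

HOSTNAME_REGEX = re.compile(r'^[a-z][a-z0-9-\.]+$')

# Must start with a lowercase letter, then one or more of [a-z0-9-.],
# so single-character names and leading digits are rejected.
for candidate in ('lanbox-01', 'node.lan.example', '1stbox', 'UPPER', 'x'):
    verdict = 'ok' if HOSTNAME_REGEX.match(candidate) else 'rejected'
    print('%-20s %s' % (candidate, verdict))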
|
5b45d4996de8c15dfc09905b0e63651fdbb2fcc6 | angr/engines/soot/expressions/phi.py | angr/engines/soot/expressions/phi.py |
from .base import SimSootExpr
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
if len(self.expr.values) != 2:
import ipdb; ipdb.set_trace();
v1, v2 = [self._translate_value(v) for v in self.expr.values]
v = self.expr = self.state.memory.load(v1, none_if_missing=True)
if v is None:
v = self.expr = self.state.memory.load(v2, none_if_missing=True)
if v is None:
import ipdb; ipdb.set_trace();
self.expr = v
|
from .base import SimSootExpr
import logging
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
locals_option = [self._translate_value(v) for v in self.expr.values]
values = []
for local in locals_option:
value = self.state.memory.load(local, none_if_missing=True)
if value is not None:
values.append(value)
if len(values) == 0:
l.warning("Couldn't find a value of Phi expression in memory.")
return
if len(values) > 2:
l.warning("Found multiple values of Phi expression in memory.")
self.expr = values[-1]
| Extend Phi expression to work with more than 2 values | Extend Phi expression to work with more than 2 values
| Python | bsd-2-clause | schieb/angr,iamahuman/angr,angr/angr,schieb/angr,angr/angr,angr/angr,iamahuman/angr,schieb/angr,iamahuman/angr |
from .base import SimSootExpr
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
if len(self.expr.values) != 2:
import ipdb; ipdb.set_trace();
v1, v2 = [self._translate_value(v) for v in self.expr.values]
v = self.expr = self.state.memory.load(v1, none_if_missing=True)
if v is None:
v = self.expr = self.state.memory.load(v2, none_if_missing=True)
if v is None:
import ipdb; ipdb.set_trace();
self.expr = v
Extend Phi expression to work with more than 2 values |
from .base import SimSootExpr
import logging
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
locals_option = [self._translate_value(v) for v in self.expr.values]
values = []
for local in locals_option:
value = self.state.memory.load(local, none_if_missing=True)
if value is not None:
values.append(value)
if len(values) == 0:
l.warning("Couldn't find a value of Phi expression in memory.")
return
if len(values) > 2:
l.warning("Found multiple values of Phi expression in memory.")
self.expr = values[-1]
| <commit_before>
from .base import SimSootExpr
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
if len(self.expr.values) != 2:
import ipdb; ipdb.set_trace();
v1, v2 = [self._translate_value(v) for v in self.expr.values]
v = self.expr = self.state.memory.load(v1, none_if_missing=True)
if v is None:
v = self.expr = self.state.memory.load(v2, none_if_missing=True)
if v is None:
import ipdb; ipdb.set_trace();
self.expr = v
<commit_msg>Extend Phi expression to work with more than 2 values<commit_after> |
from .base import SimSootExpr
import logging
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
locals_option = [self._translate_value(v) for v in self.expr.values]
values = []
for local in locals_option:
value = self.state.memory.load(local, none_if_missing=True)
if value is not None:
values.append(value)
if len(values) == 0:
l.warning("Couldn't find a value of Phi expression in memory.")
return
if len(values) > 2:
l.warning("Found multiple values of Phi expression in memory.")
self.expr = values[-1]
|
from .base import SimSootExpr
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
if len(self.expr.values) != 2:
import ipdb; ipdb.set_trace();
v1, v2 = [self._translate_value(v) for v in self.expr.values]
v = self.expr = self.state.memory.load(v1, none_if_missing=True)
if v is None:
v = self.expr = self.state.memory.load(v2, none_if_missing=True)
if v is None:
import ipdb; ipdb.set_trace();
self.expr = v
Extend Phi expression to work with more than 2 values
from .base import SimSootExpr
import logging
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
locals_option = [self._translate_value(v) for v in self.expr.values]
values = []
for local in locals_option:
value = self.state.memory.load(local, none_if_missing=True)
if value is not None:
values.append(value)
if len(values) == 0:
l.warning("Couldn't find a value of Phi expression in memory.")
return
if len(values) > 2:
l.warning("Found multiple values of Phi expression in memory.")
self.expr = values[-1]
| <commit_before>
from .base import SimSootExpr
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
if len(self.expr.values) != 2:
import ipdb; ipdb.set_trace();
v1, v2 = [self._translate_value(v) for v in self.expr.values]
v = self.expr = self.state.memory.load(v1, none_if_missing=True)
if v is None:
v = self.expr = self.state.memory.load(v2, none_if_missing=True)
if v is None:
import ipdb; ipdb.set_trace();
self.expr = v
<commit_msg>Extend Phi expression to work with more than 2 values<commit_after>
from .base import SimSootExpr
import logging
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def __init__(self, expr, state):
super(SimSootExpr_Phi, self).__init__(expr, state)
def _execute(self):
locals_option = [self._translate_value(v) for v in self.expr.values]
values = []
for local in locals_option:
value = self.state.memory.load(local, none_if_missing=True)
if value is not None:
values.append(value)
if len(values) == 0:
l.warning("Couldn't find a value of Phi expression in memory.")
return
if len(values) > 2:
l.warning("Found multiple values of Phi expression in memory.")
self.expr = values[-1]
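
Stripped of the SimState plumbing, the generalized Phi resolution in this record is: probe each candidate local, keep the hits, warn when none or more than two are found, and return the last value. A self-contained sketch with memory modeled as a plain dict:

import logging

logging.basicConfig()
l = logging.getLogger('phi_sketch')

def resolve_phi(memory, locals_option):
    # memory.get returns None for missing locals, mirroring
    # state.memory.load(..., none_if_missing=True).
    values = [v for v in (memory.get(loc) for loc in locals_option)
              if v is not None]
    if not values:
        l.warning("Couldn't find a value of Phi expression in memory.")
        return None
    if len(values) > 2:
        l.warning("Found multiple values of Phi expression in memory.")
    return values[-1]

print(resolve_phi({'r0': 10, 'r2': 30}, ['r0', 'r1', 'r2']))  # -> 30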
|
e0cb864f19f05f4ddfed0fa90c8b9895bde9b8df | caminae/core/management/__init__.py | caminae/core/management/__init__.py | """
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
import os
from django.db import connection, transaction, models
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)),
os.path.join(app_dir, "%s.sql" % app_label)]
cursor = connection.cursor()
for sql_file in sql_files:
try:
if os.path.exists(sql_file):
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
| """
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
import os
import re
from django.db import connection, transaction, models
app_label = kwargs.get('app')
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
if not os.path.exists(app_dir):
return
r = re.compile(r'^.*\.sql$')
sql_files = [os.path.join(app_dir, f)
for f in os.listdir(app_dir)
if r.match(f) is not None]
sql_files.sort()
cursor = connection.cursor()
for sql_file in sql_files:
try:
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
| Enable loading of SQL scripts with arbitrary name | Enable loading of SQL scripts with arbitrary name
| Python | bsd-2-clause | Anaethelion/Geotrek,makinacorpus/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,makinacorpus/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,mabhub/Geotrek,johan--/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek | """
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
import os
from django.db import connection, transaction, models
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)),
os.path.join(app_dir, "%s.sql" % app_label)]
cursor = connection.cursor()
for sql_file in sql_files:
try:
if os.path.exists(sql_file):
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
Enable loading of SQL scripts with arbitrary name | """
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
import os
import re
from django.db import connection, transaction, models
app_label = kwargs.get('app')
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
if not os.path.exists(app_dir):
return
r = re.compile(r'^.*\.sql$')
sql_files = [os.path.join(app_dir, f)
for f in os.listdir(app_dir)
if r.match(f) is not None]
sql_files.sort()
cursor = connection.cursor()
for sql_file in sql_files:
try:
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
| <commit_before>"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
import os
from django.db import connection, transaction, models
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)),
os.path.join(app_dir, "%s.sql" % app_label)]
cursor = connection.cursor()
for sql_file in sql_files:
try:
if os.path.exists(sql_file):
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
<commit_msg>Enable loading of SQL scripts with arbitrary name<commit_after> | """
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
import os
import re
from django.db import connection, transaction, models
app_label = kwargs.get('app')
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
if not os.path.exists(app_dir):
return
r = re.compile(r'^.*\.sql$')
sql_files = [os.path.join(app_dir, f)
for f in os.listdir(app_dir)
if r.match(f) is not None]
sql_files.sort()
cursor = connection.cursor()
for sql_file in sql_files:
try:
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
| """
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
import os
from django.db import connection, transaction, models
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)),
os.path.join(app_dir, "%s.sql" % app_label)]
cursor = connection.cursor()
for sql_file in sql_files:
try:
if os.path.exists(sql_file):
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
Enable loading of SQL scripts with arbitrary name"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
import os
import re
from django.db import connection, transaction, models
app_label = kwargs.get('app')
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
if not os.path.exists(app_dir):
return
r = re.compile(r'^.*\.sql$')
sql_files = [os.path.join(app_dir, f)
for f in os.listdir(app_dir)
if r.match(f) is not None]
sql_files.sort()
cursor = connection.cursor()
for sql_file in sql_files:
try:
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
| <commit_before>"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
import os
from django.db import connection, transaction, models
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)),
os.path.join(app_dir, "%s.sql" % app_label)]
cursor = connection.cursor()
for sql_file in sql_files:
try:
if os.path.exists(sql_file):
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
<commit_msg>Enable loading of SQL scripts with arbitrary name<commit_after>"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
import logging
import traceback
from south.signals import post_migrate
logger = logging.getLogger(__name__)
def run_initial_sql(sender, **kwargs):
import os
import re
from django.db import connection, transaction, models
app_label = kwargs.get('app')
app_dir = os.path.normpath(os.path.join(os.path.dirname(
models.get_app(app_label).__file__), 'sql'))
if not os.path.exists(app_dir):
return
r = re.compile(r'^.*\.sql$')
sql_files = [os.path.join(app_dir, f)
for f in os.listdir(app_dir)
if r.match(f) is not None]
sql_files.sort()
cursor = connection.cursor()
for sql_file in sql_files:
try:
logger.info("Loading initial SQL data from '%s'" % sql_file)
f = open(sql_file)
sql = f.read()
f.close()
cursor.execute(sql)
except Exception, e:
logger.error("Failed to install custom SQL file '%s': %s\n" %
(sql_file, e))
traceback.print_exc()
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
post_migrate.connect(run_initial_sql)
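
The rewritten hook in this record derives execution order purely from file names, so numeric prefixes become the ordering mechanism. The discovery step in isolation (the directory name here is hypothetical; glob would work equally well):

import os
import re

def find_sql_files(app_dir):
    # Any *.sql file qualifies; lexicographic sort means
    # '00_functions.sql' runs before '10_views.sql'.
    if not os.path.exists(app_dir):
        return []
    pattern = re.compile(r'^.*\.sql$')
    return [os.path.join(app_dir, f)
            for f in sorted(os.listdir(app_dir))
            if pattern.match(f)]

print(find_sql_files('sql'))  # [] unless a ./sql directory is present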
|
2834a22489ebe801743434dcf26e727448355756 | corehq/messaging/scheduling/scheduling_partitioned/migrations/0009_update_custom_recipient_ids.py | corehq/messaging/scheduling/scheduling_partitioned/migrations/0009_update_custom_recipient_ids.py | # Generated by Django 2.2.24 on 2021-11-19 14:36
from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
| from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
| Remove date from migration since this one is copied and edited | Remove date from migration since this one is copied and edited
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | # Generated by Django 2.2.24 on 2021-11-19 14:36
from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
Remove date from migration since this one is copied and edited | from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
| <commit_before># Generated by Django 2.2.24 on 2021-11-19 14:36
from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
<commit_msg>Remove date from migration since this one is copied and edited<commit_after> | from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
| # Generated by Django 2.2.24 on 2021-11-19 14:36
from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
Remove date from migration since this one is copied and editedfrom django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
| <commit_before># Generated by Django 2.2.24 on 2021-11-19 14:36
from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
<commit_msg>Remove date from migration since this one is copied and edited<commit_after>from django.db import migrations
from corehq.messaging.scheduling.scheduling_partitioned.models import CaseTimedScheduleInstance
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
def update_custom_recipient_ids(*args, **kwargs):
for db in get_db_aliases_for_partitioned_query():
CaseTimedScheduleInstance.objects.using(db).filter(recipient_id="CASE_OWNER_LOCATION_PARENT").update(
recipient_id='MOBILE_WORKER_CASE_OWNER_LOCATION_PARENT'
)
class Migration(migrations.Migration):
dependencies = [
('scheduling_partitioned', '0008_track_attempts'),
]
operations = [migrations.RunPython(update_custom_recipient_ids)]
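
As written in this record the migration is forward-only; RunPython also takes an optional reverse callable, and migrations.RunPython.noop is the stock choice when the data change is harmless to keep on rollback. A hedged sketch with the same dependency, forward body elided:

from django.db import migrations

def update_custom_recipient_ids(apps, schema_editor):
    pass  # forward pass elided; see the migration in this record

class Migration(migrations.Migration):

    dependencies = [
        ('scheduling_partitioned', '0008_track_attempts'),
    ]

    operations = [
        # The second argument lets `migrate <app> 0008` succeed
        # without trying to undo the data update.
        migrations.RunPython(update_custom_recipient_ids,
                             migrations.RunPython.noop),
    ]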
|
2cecc2e197e4a4089e29b350179103e323136268 | ddb_ngsflow/variation/sv/itdseek.py | ddb_ngsflow/variation/sv/itdseek.py | """
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name, samples):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples configuration dictionary.
:type config: dict.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
| """
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
| Remove unneeded samples config passing | Remove unneeded samples config passing
| Python | mit | dgaston/ddb-ngsflow,dgaston/ddbio-ngsflow | """
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name, samples):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples configuration dictionary.
:type config: dict.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
Remove unneeded samples config passing | """
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
| <commit_before>"""
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name, samples):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples configuration dictionary.
:type config: dict.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
<commit_msg>Remove unneeded samples config passing<commit_after> | """
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
| """
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name, samples):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples configuration dictionary.
:type config: dict.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
Remove unneeded samples config passing"""
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
| <commit_before>"""
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name, samples):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples configuration dictionary.
:type config: dict.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
<commit_msg>Remove unneeded samples config passing<commit_after>"""
.. module:: freebayes
:platform: Unix, OSX
:synopsis: A wrapper module for calling ScanIndel.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def run_flt3_itdseek(job, config, name):
"""Run ITDseek without a matched normal sample
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:returns: str -- The output vcf file name.
"""
itdseek_vcf = "{}.flt3.itdseek.vcf".format(name)
itdseek_logfile = "{}.flt3.itdseek.log".format(name)
itdseek_command = ["{}".format(config['itdseek']['bin']),
"{}.rg.sorted.bam".format(name),
"{}".format(config['reference']),
"{}".format(config['samtools-0.19']['bin']),
">",
"{}".format(itdseek_vcf)]
job.fileStore.logToMaster("ITDSeek Command: {}\n".format(itdseek_command))
pipeline.run_and_log_command(" ".join(itdseek_command), itdseek_logfile)
return itdseek_vcf
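
The command list in this record ends with '>' and a file name, which only works because the joined string is handed to a shell. A small sketch of the same capture without shell redirection, using subprocess directly (the binary and file names below are placeholders):

import subprocess

# 'echo' stands in for the itdseek binary so the sketch is runnable.
itdseek_command = ['echo', '##fileformat=VCFv4.1']

with open('sample.flt3.itdseek.vcf', 'w') as vcf:
    # stdout=vcf replaces the shell's '> file', avoiding quoting pitfalls.
    subprocess.run(itdseek_command, stdout=vcf, check=True)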
|
3ab5b791494111a3b0d962b8b5de588665498653 | airpy/main.py | airpy/main.py | import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul™"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main() | import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()
| Remove the Pythonic Soul Trademark for now.. causing unicode issues with python2 | Remove the Pythonic Soul Trademark for now.. causing unicode issues with python2
| Python | mit | kevinaloys/airpy | import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul™"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()Remove the Pythonic Soul Trademark for now.. causing unicode issues with python2 | import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()
| <commit_before>import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul™"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()<commit_msg>Remove the Pythonic Soul Trademark for now.. causing unicode issues with python2<commit_after> | import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()
| import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul™"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()Remove the Pythonic Soul Trademark for now.. causing unicode issues with python2import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()
| <commit_before>import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul™"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()<commit_msg>Remove the Pythonic Soul Trademark for now.. causing unicode issues with python2<commit_after>import click
import requests
import os
import shutil
from appdirs import user_data_dir
from airpy.install import airinstall
from airpy.list import airlist
from airpy.start import airstart
from airpy.remove import airremove
from airpy.autopilot import airautopilot
def main():
@click.group()
def airpy():
"""AirPy : Documentation Installer for the Pythonic Soul"""
pass
@airpy.command(help = 'Install offline doc of a Python module.')
@click.argument('name')
def install(name):
airinstall(name)
@airpy.command(help = 'Start a doc in a browser.')
@click.argument('name')
def start(name):
airstart(name)
@airpy.command(help = 'Remove an installed doc.')
@click.argument('name')
@click.option('--all')
def remove(name, all):
airremove(name)
@airpy.command(help = 'List installed docs.')
def list():
airlist()
@airpy.command(help = 'Auto install docs.')
def autopilot():
airautopilot()
airpy()
if __name__ == '__main__':
main()
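
The pattern in this record (a click group defined inside main() with subcommands attached by decorator) reduces to a few lines; a runnable miniature with hypothetical command names:

import click

@click.group()
def cli():
    """Documentation installer sketch."""

@cli.command(help='Install offline doc of a Python module.')
@click.argument('name')
def install(name):
    click.echo('installing docs for %s' % name)

if __name__ == '__main__':
    cli()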
|
e59f187f2e4557114e534be57dc078ddf112b87c | completions_dev.py | completions_dev.py | import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
| import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
| Use tabs in new completions file snippet | Use tabs in new completions file snippet
Respects the user's indentation configuration.
| Python | mit | SublimeText/PackageDev,SublimeText/AAAPackageDev,SublimeText/AAAPackageDev | import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
Use tabs in new completions file snippet
Respects the user's indentation configuration. | import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
| <commit_before>import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
<commit_msg>Use tabs in new completions file snippet
Respects the user's indentation configuration.<commit_after> | import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
| import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
Use tabs in new completions file snippet
Respects the user's indentation configuration.import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
| <commit_before>import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
<commit_msg>Use tabs in new completions file snippet
Respects the user's indentation configuration.<commit_after>import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
|
62b74e6d6452012f8ad68810446a3648749a3fee | collections/show-test/print-divs.py | collections/show-test/print-divs.py | # print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + '</div>')
printDivs(20) | # print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + ': Lorem ipsum dolor sic amet</div>')
printDivs(20) | Add dummy text to divs. | Add dummy text to divs.
| Python | apache-2.0 | scholarslab/takeback,scholarslab/takeback,scholarslab/takeback,scholarslab/takeback,scholarslab/takeback | # print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + '</div>')
printDivs(20)Add dummy text to divs. | # print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + ': Lorem ipsum dolor sic amet</div>')
printDivs(20) | <commit_before># print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + '</div>')
printDivs(20)<commit_msg>Add dummy text to divs.<commit_after> | # print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + ': Lorem ipsum dolor sic amet</div>')
printDivs(20) | # print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + '</div>')
printDivs(20)Add dummy text to divs.# print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + ': Lorem ipsum dolor sic amet</div>')
printDivs(20) | <commit_before># print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + '</div>')
printDivs(20)<commit_msg>Add dummy text to divs.<commit_after># print-divs.py
def printDivs(num):
for i in range(num):
print('<div class="item">Item ' + str(i+1) + ': Lorem ipsum dolor sic amet</div>')
printDivs(20) |
02f18e2ec6788f4cf92e8a2f78898f6861f2f395 | gofast/gpio.py | gofast/gpio.py |
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
raise RuntimeError("Error initializing GPIO")
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
|
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
def error(*args):
raise RuntimeError("Error initializing GPIO")
write, read, cleanup, setup = error, error, error, error
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
| Make GPIO importable, but not usable, as non-root | Make GPIO importable, but not usable, as non-root
| Python | bsd-2-clause | cg123/computernetworks,cg123/computernetworks,cg123/computernetworks |
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
raise RuntimeError("Error initializing GPIO")
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
Make GPIO importable, but not usable, as non-root |
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
def error(*args):
raise RuntimeError("Error initializing GPIO")
write, read, cleanup, setup = error, error, error, error
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
| <commit_before>
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
raise RuntimeError("Error initializing GPIO")
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
<commit_msg>Make GPIO importable, but not usable, as non-root<commit_after> |
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
def error(*args):
raise RuntimeError("Error initializing GPIO")
write, read, cleanup, setup = error, error, error, error
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
|
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
raise RuntimeError("Error initializing GPIO")
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
Make GPIO importable, but not usable, as non-root
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
def error(*args):
raise RuntimeError("Error initializing GPIO")
write, read, cleanup, setup = error, error, error, error
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
| <commit_before>
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
raise RuntimeError("Error initializing GPIO")
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
<commit_msg>Make GPIO importable, but not usable, as non-root<commit_after>
import cffi
ffi = cffi.FFI()
ffi.cdef("""
int setup(void);
void setup_gpio(int gpio, int direction, int pud);
int gpio_function(int gpio);
void output_gpio(int gpio, int value);
int input_gpio(int gpio);
void set_rising_event(int gpio, int enable);
void set_falling_event(int gpio, int enable);
void set_high_event(int gpio, int enable);
void set_low_event(int gpio, int enable);
int eventdetected(int gpio);
void cleanup(void);
""")
C = ffi.verify(sources=['c_gpio.c'])
write = C.output_gpio
read = C.input_gpio
cleanup = C.cleanup
setup = C.setup_gpio
if C.setup():
def error(*args):
raise RuntimeError("Error initializing GPIO")
write, read, cleanup, setup = error, error, error, error
INPUT = 1
OUTPUT = 0
HIGH = 1
LOW = 0
PUD_OFF = 0
PUD_DOWN = 1
PUD_UP = 2
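The commit above is a degrade-gracefully idiom: when privileged initialization fails, the module rebinds its public callables to a raising stub, so a plain import succeeds for unprivileged users and only actual use fails. A generic sketch of the pattern, with the C-level setup probe replaced by a placeholder:

def _probe_hardware():
    # placeholder for the privileged C-extension setup() call
    return False

if not _probe_hardware():
    def _unavailable(*args, **kwargs):
        raise RuntimeError("GPIO unavailable: initialization failed (root required?)")
    read = write = setup = cleanup = _unavailable  # import works; any call raises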
|
5c4026fbe42625a3595d26c2ef71cb1298b36547 | version.py | version.py | major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52 | major = 0
minor=0
patch=24
branch="master"
timestamp=1376526666.61 | Tag commit for v0.0.24-master generated by gitmake.py | Tag commit for v0.0.24-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake | major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52Tag commit for v0.0.24-master generated by gitmake.py | major = 0
minor=0
patch=24
branch="master"
timestamp=1376526666.61 | <commit_before>major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52<commit_msg>Tag commit for v0.0.24-master generated by gitmake.py<commit_after> | major = 0
minor=0
patch=24
branch="master"
timestamp=1376526666.61 | major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52Tag commit for v0.0.24-master generated by gitmake.pymajor = 0
minor=0
patch=24
branch="master"
timestamp=1376526666.61 | <commit_before>major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52<commit_msg>Tag commit for v0.0.24-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=24
branch="master"
timestamp=1376526666.61 |
132f91c5f3f193ca3b1a246b9ef5b20b4e03609f | core/validators.py | core/validators.py | from datetime import datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(date):
today = datetime.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d'))
if event_date - datetime.date(today) < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
| from datetime import date, datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(e_date):
today = date.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day),
'%Y-%m-%d'))
if event_date - today < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
| Apply suggested changes on date | Apply suggested changes on date
| Python | bsd-3-clause | DjangoGirls/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls | from datetime import datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(date):
today = datetime.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d'))
if event_date - datetime.date(today) < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
Apply suggested changes on date | from datetime import date, datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(e_date):
today = date.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day),
'%Y-%m-%d'))
if event_date - today < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
| <commit_before>from datetime import datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(date):
today = datetime.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d'))
if event_date - datetime.date(today) < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
<commit_msg>Apply suggested changes on date<commit_after> | from datetime import date, datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(e_date):
today = date.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day),
'%Y-%m-%d'))
if event_date - today < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
| from datetime import datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(date):
today = datetime.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d'))
if event_date - datetime.date(today) < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
Apply suggested changes on datefrom datetime import date, datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(e_date):
today = date.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day),
'%Y-%m-%d'))
if event_date - today < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
| <commit_before>from datetime import datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(date):
today = datetime.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d'))
if event_date - datetime.date(today) < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
<commit_msg>Apply suggested changes on date<commit_after>from datetime import date, datetime, timedelta
from django.core.exceptions import ValidationError
def validate_approximatedate(date):
if date.month == 0:
raise ValidationError(
'Event date can\'t be a year only. '
'Please, provide at least a month and a year.'
)
def validate_event_date(e_date):
today = date.today()
event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day),
'%Y-%m-%d'))
if event_date - today < timedelta(days=90):
raise ValidationError('Your event date is too close. '
'Workshop date should be at least 3 months (90 days) from now.')
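Both versions above round-trip the event date through string formatting and strptime just to obtain a datetime.date. Assuming the incoming value exposes integer year, month and day attributes, the same check needs no parsing at all; a hedged sketch:

from datetime import date, timedelta
from django.core.exceptions import ValidationError

def validate_lead_time(e_date, minimum=timedelta(days=90)):
    # assumes e_date carries integer .year/.month/.day attributes; a
    # zero month is already rejected by validate_approximatedate, but a
    # zero day (year-and-month-only dates) would still need handling.
    if date(e_date.year, e_date.month, e_date.day) - date.today() < minimum:
        raise ValidationError('Workshop date should be at least 90 days from now.')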
|
e0276f6c86e07fa82f19c5f895b6e513d38255c0 | server/management/commands/friendly_model_name.py | server/management/commands/friendly_model_name.py | '''
Retrieves the firendly model name for machines that don't have one yet.
'''
from django.core.management.base import BaseCommand, CommandError
from server.models import Machine
from django.db.models import Q
import server.utils as utils
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}'
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
| """Retrieves the friendly model name for machines that don't have one yet."""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
import server.utils as utils
from server.models import Machine
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}')
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
| Fix missing paren, imports, spelling. | Fix missing paren, imports, spelling.
| Python | apache-2.0 | sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,salopensource/sal | '''
Retrieves the firendly model name for machines that don't have one yet.
'''
from django.core.management.base import BaseCommand, CommandError
from server.models import Machine
from django.db.models import Q
import server.utils as utils
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}'
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
Fix missing paren, imports, spelling. | """Retrieves the friendly model name for machines that don't have one yet."""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
import server.utils as utils
from server.models import Machine
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}')
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
| <commit_before>'''
Retrieves the firendly model name for machines that don't have one yet.
'''
from django.core.management.base import BaseCommand, CommandError
from server.models import Machine
from django.db.models import Q
import server.utils as utils
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}'
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
<commit_msg>Fix missing paren, imports, spelling.<commit_after> | """Retrieves the friendly model name for machines that don't have one yet."""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
import server.utils as utils
from server.models import Machine
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}')
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
| '''
Retrieves the firendly model name for machines that don't have one yet.
'''
from django.core.management.base import BaseCommand, CommandError
from server.models import Machine
from django.db.models import Q
import server.utils as utils
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}'
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
Fix missing paren, imports, spelling."""Retrieves the friendly model name for machines that don't have one yet."""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
import server.utils as utils
from server.models import Machine
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}')
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
| <commit_before>'''
Retrieves the firendly model name for machines that don't have one yet.
'''
from django.core.management.base import BaseCommand, CommandError
from server.models import Machine
from django.db.models import Q
import server.utils as utils
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}'
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
<commit_msg>Fix missing paren, imports, spelling.<commit_after>"""Retrieves the friendly model name for machines that don't have one yet."""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
import server.utils as utils
from server.models import Machine
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}')
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
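Two idioms in the command above are worth naming: ORing Q objects catches both NULL and empty-string values, since machine_model_friendly__isnull=True alone misses rows stored as '', and the [:100] slice bounds each run so repeated invocations work through the backlog in batches. A reduced sketch of the filter, reusing the record's model:

from django.db.models import Q
from server.models import Machine

def missing_friendly_names(limit=100):
    return Machine.objects.filter(
        Q(machine_model_friendly__isnull=True) | Q(machine_model_friendly='')
    )[:limit]  # bounded batch; rerun the command for the next slice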
|
14cdf6b7a82e49f1860aee41e4b1a5b20cf179b2 | quickstats/signals.py | quickstats/signals.py | import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
| import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_chart(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_location(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
| Make unique names for signal functions | Make unique names for signal functions
| Python | mit | kfdm/django-simplestats,kfdm/django-simplestats | import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
Make unique names for signal functions | import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_chart(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_location(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
| <commit_before>import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
<commit_msg>Make unique names for signal functions<commit_after> | import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_chart(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_location(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
| import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
Make unique names for signal functionsimport logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_chart(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_location(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
| <commit_before>import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_data(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
<commit_msg>Make unique names for signal functions<commit_after>import logging
from . import tasks
from django.db.models.signals import post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
@receiver(post_save, sender="quickstats.Sample", dispatch_uid="quickstats-refresh-chart")
def hook_update_chart(sender, instance, *args, **kwargs):
tasks.update_chart.delay(instance.widget_id)
@receiver(post_save, sender="quickstats.Waypoint", dispatch_uid="quickstats-refresh-location")
def hook_update_location(sender, instance, *args, **kwargs):
tasks.update_location.delay(instance.widget_id)
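On the rename above: each @receiver decorator connects its handler at definition time, so both hooks fire even while they share a name, but the second def rebinds the module attribute, leaving the first function unreachable by name for later disconnect() calls or test monkeypatching. A minimal demonstration of the shadowing, independent of Django:

def hook():            # first binding
    return "chart"

def hook():            # same name: rebinds the module attribute
    return "location"

assert hook() == "location"  # only the last definition is reachable by name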
|
171974ab9c069abe14c25ef220f683d4905d1454 | socorro/external/rabbitmq/rmq_new_crash_source.py | socorro/external/rabbitmq/rmq_new_crash_source.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""this class is a refactoring of the iteratior portion of the legacy
Socorro processor. It isolates just the part of fetching the ooids of
jobs to be processed"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""an adapter that allows this class can serve as an iterator in a
fetch_transform_save app"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""An iterable of crashes from RabbitMQ"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""Return an iterator over crashes from RabbitMQ.
Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg
is a crash ID, and the kwargs contain only a callback function which
the FTS app will call to send an ack to Rabbit after processing is
complete.
"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
| Correct docs on RabbitMQ crash source. | Correct docs on RabbitMQ crash source.
| Python | mpl-2.0 | linearregression/socorro,linearregression/socorro,Serg09/socorro,Serg09/socorro,linearregression/socorro,m8ttyB/socorro,Serg09/socorro,luser/socorro,twobraids/socorro,lonnen/socorro,bsmedberg/socorro,AdrianGaudebert/socorro,cliqz/socorro,yglazko/socorro,pcabido/socorro,lonnen/socorro,twobraids/socorro,bsmedberg/socorro,adngdb/socorro,bsmedberg/socorro,AdrianGaudebert/socorro,Tayamarn/socorro,cliqz/socorro,Tchanders/socorro,twobraids/socorro,Serg09/socorro,pcabido/socorro,m8ttyB/socorro,linearregression/socorro,KaiRo-at/socorro,spthaolt/socorro,Tayamarn/socorro,luser/socorro,Tchanders/socorro,spthaolt/socorro,KaiRo-at/socorro,KaiRo-at/socorro,mozilla/socorro,mozilla/socorro,Tayamarn/socorro,Tayamarn/socorro,Serg09/socorro,cliqz/socorro,adngdb/socorro,spthaolt/socorro,mozilla/socorro,yglazko/socorro,luser/socorro,Serg09/socorro,yglazko/socorro,AdrianGaudebert/socorro,cliqz/socorro,pcabido/socorro,rhelmer/socorro,Tchanders/socorro,Tayamarn/socorro,AdrianGaudebert/socorro,yglazko/socorro,m8ttyB/socorro,bsmedberg/socorro,lonnen/socorro,adngdb/socorro,m8ttyB/socorro,rhelmer/socorro,Tchanders/socorro,mozilla/socorro,luser/socorro,twobraids/socorro,adngdb/socorro,mozilla/socorro,m8ttyB/socorro,Tchanders/socorro,luser/socorro,AdrianGaudebert/socorro,bsmedberg/socorro,twobraids/socorro,spthaolt/socorro,Tayamarn/socorro,mozilla/socorro,twobraids/socorro,KaiRo-at/socorro,pcabido/socorro,linearregression/socorro,Tchanders/socorro,KaiRo-at/socorro,cliqz/socorro,lonnen/socorro,pcabido/socorro,yglazko/socorro,adngdb/socorro,rhelmer/socorro,pcabido/socorro,linearregression/socorro,AdrianGaudebert/socorro,luser/socorro,spthaolt/socorro,rhelmer/socorro,KaiRo-at/socorro,cliqz/socorro,m8ttyB/socorro,rhelmer/socorro,adngdb/socorro,spthaolt/socorro,rhelmer/socorro,yglazko/socorro | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""this class is a refactoring of the iteratior portion of the legacy
Socorro processor. It isolates just the part of fetching the ooids of
jobs to be processed"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""an adapter that allows this class can serve as an iterator in a
fetch_transform_save app"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
Correct docs on RabbitMQ crash source. | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""An iterable of crashes from RabbitMQ"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""Return an iterator over crashes from RabbitMQ.
Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg
is a crash ID, and the kwargs contain only a callback function which
the FTS app will call to send an ack to Rabbit after processing is
complete.
"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
| <commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""this class is a refactoring of the iteratior portion of the legacy
Socorro processor. It isolates just the part of fetching the ooids of
jobs to be processed"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""an adapter that allows this class can serve as an iterator in a
fetch_transform_save app"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
<commit_msg>Correct docs on RabbitMQ crash source.<commit_after> | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""An iterable of crashes from RabbitMQ"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""Return an iterator over crashes from RabbitMQ.
Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg
is a crash ID, and the kwargs contain only a callback function which
the FTS app will call to send an ack to Rabbit after processing is
complete.
"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""this class is a refactoring of the iteratior portion of the legacy
Socorro processor. It isolates just the part of fetching the ooids of
jobs to be processed"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""an adapter that allows this class can serve as an iterator in a
fetch_transform_save app"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
Correct docs on RabbitMQ crash source.# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""An iterable of crashes from RabbitMQ"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""Return an iterator over crashes from RabbitMQ.
Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg
is a crash ID, and the kwargs contain only a callback function which
the FTS app will call to send an ack to Rabbit after processing is
complete.
"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
| <commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""this class is a refactoring of the iteratior portion of the legacy
Socorro processor. It isolates just the part of fetching the ooids of
jobs to be processed"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""an adapter that allows this class can serve as an iterator in a
fetch_transform_save app"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
<commit_msg>Correct docs on RabbitMQ crash source.<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from configman import Namespace, RequiredConfig
from configman.converters import class_converter
from functools import partial
#==============================================================================
class RMQNewCrashSource(RequiredConfig):
"""An iterable of crashes from RabbitMQ"""
required_config = Namespace()
required_config.source.add_option(
'crashstorage_class',
doc='the source storage class',
default='socorro.external.rabbitmq.crashstorage.RabbitMQCrashStorage',
from_string_converter=class_converter
)
#--------------------------------------------------------------------------
def __init__(self, config, processor_name, quit_check_callback=None):
self.config = config
self.crash_store = config.crashstorage_class(config)
#--------------------------------------------------------------------------
def close(self):
pass
#--------------------------------------------------------------------------
def __iter__(self):
"""Return an iterator over crashes from RabbitMQ.
Each crash is a tuple of the ``(args, kwargs)`` variety. The lone arg
is a crash ID, and the kwargs contain only a callback function which
the FTS app will call to send an ack to Rabbit after processing is
complete.
"""
for a_crash_id in self.crash_store.new_crashes():
yield (
(a_crash_id,),
{'finished_func': partial(
self.crash_store.ack_crash,
a_crash_id
)}
)
#--------------------------------------------------------------------------
def __call__(self):
return self.__iter__()
|
5a2673366224751e675b894c13a2152c50d28e87 | fileupload/urls.py | fileupload/urls.py | # encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
urlpatterns = patterns('',
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
| # encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
from django.http import HttpResponseRedirect
urlpatterns = patterns('',
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
| Update to redirect /upload/ to /upload/basic/plus/ | Update to redirect /upload/ to /upload/basic/plus/
| Python | bsd-2-clause | ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark | # encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
urlpatterns = patterns('',
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
Update to redirect /upload/ to /upload/basic/plus/ | # encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
from django.http import HttpResponseRedirect
urlpatterns = patterns('',
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
| <commit_before># encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
urlpatterns = patterns('',
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
<commit_msg>Update to redirect /upload/ to /upload/basic/plus/<commit_after> | # encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
from django.http import HttpResponseRedirect
urlpatterns = patterns('',
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
| # encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
urlpatterns = patterns('',
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
Update to redirect /upload/ to /upload/basic/plus/# encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
from django.http import HttpResponseRedirect
urlpatterns = patterns('',
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
| <commit_before># encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
urlpatterns = patterns('',
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
<commit_msg>Update to redirect /upload/ to /upload/basic/plus/<commit_after># encoding: utf-8
from django.conf.urls import patterns, url
from fileupload.views import (
BasicVersionCreateView, BasicPlusVersionCreateView,
jQueryVersionCreateView, AngularVersionCreateView,
PictureCreateView, PictureDeleteView, PictureListView,
)
from django.http import HttpResponseRedirect
urlpatterns = patterns('',
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^basic/plus/$', BasicPlusVersionCreateView.as_view(), name='upload-basic-plus'),
url(r'^delete/(?P<pk>\d+)$', PictureDeleteView.as_view(), name='upload-delete'),
url(r'^view/$', PictureListView.as_view(), name='upload-view'),
)
|
bbb3119c0087ec52185cd275b5dc132868129658 | oc/models.py | oc/models.py | class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
| class Calendar:
def __init__(self, year=2015):
self.year = year # TODO get current year
self.dates = []
for month in range(1, 13):
self.insert_dates(month)
def insert_dates(self, month):
days = 28
if month in [1, 4, 6, 9, 11]:
days = 30
if month in [3, 5, 7, 8, 10, 12]:
days = 31
print days
self.create_dates(days, month)
def create_dates(self, days, month):
for day in range(1, days + 1):
date = Date(day=day, month=month)
date.write_into(self.dates)
class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
 | Create Calendar with list of all dates | Create Calendar with list of all dates
| Python | mit | be-ndee/object-calisthenics | class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
Create Calendar with list of all dates
def __init__(self, year=2015):
self.year = year # TODO get current year
self.dates = []
for month in range(1, 13):
self.insert_dates(month)
def insert_dates(self, month):
days = 28
if month in [1, 4, 6, 9, 11]:
days = 30
if month in [3, 5, 7, 8, 10, 12]:
days = 31
print days
self.create_dates(days, month)
def create_dates(self, days, month):
for day in range(1, days + 1):
date = Date(day=day, month=month)
date.write_into(self.dates)
class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
| <commit_before>class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
<commit_msg>Create Calendar with list of all dates
def __init__(self, year=2015):
self.year = year # TODO get current year
self.dates = []
for month in range(1, 13):
self.insert_dates(month)
def insert_dates(self, month):
days = 28
if month in [1, 4, 6, 9, 11]:
days = 30
if month in [3, 5, 7, 8, 10, 12]:
days = 31
print days
self.create_dates(days, month)
def create_dates(self, days, month):
for day in range(1, days + 1):
date = Date(day=day, month=month)
date.write_into(self.dates)
class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
| class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
Create Calendar with list of all dates
def __init__(self, year=2015):
self.year = year # TODO get current year
self.dates = []
for month in range(1, 13):
self.insert_dates(month)
def insert_dates(self, month):
days = 28
if month in [1, 4, 6, 9, 11]:
days = 30
if month in [3, 5, 7, 8, 10, 12]:
days = 31
print days
self.create_dates(days, month)
def create_dates(self, days, month):
for day in range(1, days + 1):
date = Date(day=day, month=month)
date.write_into(self.dates)
class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
| <commit_before>class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
<commit_msg>Create Calendar with list of all dates
def __init__(self, year=2015):
self.year = year # TODO get current year
self.dates = []
for month in range(1, 13):
self.insert_dates(month)
def insert_dates(self, month):
days = 28
if month in [1, 4, 6, 9, 11]:
days = 30
if month in [3, 5, 7, 8, 10, 12]:
days = 31
print days
self.create_dates(days, month)
def create_dates(self, days, month):
for day in range(1, days + 1):
date = Date(day=day, month=month)
date.write_into(self.dates)
class Person:
def __init__(self, name, birth_date):
self.name = name
self.birth_date = birth_date
class BirthDate:
def __init__(self, year, date):
self.year = year
self.date = date
class Date:
def __init__(self, day, month):
self.day = day
self.month = month
def write_into(self, list):
list.append(self)
|
b627efe0675b2b1965eeac7104cf3a8f2d675539 | rhcephcompose/main.py | rhcephcompose/main.py | """ rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
compose = Compose(conf)
compose.run()
| """ rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
parser.add_argument('--insecure', action='store_const', const=True,
default=False, help='skip SSL verification')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
if args.insecure:
conf['chacra_ssl_verify'] = False
import requests
from requests.packages.urllib3.exceptions\
import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
compose = Compose(conf)
compose.run()
| Add --insecure option to command line to disable SSL certificate verification when communicating with chacra | Add --insecure option to command line to disable SSL certificate verification when communicating with chacra
| Python | mit | red-hat-storage/rhcephcompose,red-hat-storage/rhcephcompose | """ rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
compose = Compose(conf)
compose.run()
Add --insecure option to command line to disable SSL certificate verification when communicating with chacra | """ rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
parser.add_argument('--insecure', action='store_const', const=True,
default=False, help='skip SSL verification')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
if args.insecure:
conf['chacra_ssl_verify'] = False
import requests
from requests.packages.urllib3.exceptions\
import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
compose = Compose(conf)
compose.run()
| <commit_before>""" rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
compose = Compose(conf)
compose.run()
<commit_msg>Add --insecure option to command line to disable SSL certificate verification when communicating with chacra<commit_after> | """ rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
parser.add_argument('--insecure', action='store_const', const=True,
default=False, help='skip SSL verification')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
if args.insecure:
conf['chacra_ssl_verify'] = False
import requests
from requests.packages.urllib3.exceptions\
import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
compose = Compose(conf)
compose.run()
| """ rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
compose = Compose(conf)
compose.run()
Add --insecure option to command line to disable SSL certificate verification when communicating with chacra""" rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
parser.add_argument('--insecure', action='store_const', const=True,
default=False, help='skip SSL verification')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
if args.insecure:
conf['chacra_ssl_verify'] = False
import requests
from requests.packages.urllib3.exceptions\
import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
compose = Compose(conf)
compose.run()
| <commit_before>""" rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
compose = Compose(conf)
compose.run()
<commit_msg>Add --insecure option to command line to disable SSL certificate verification when communicating with chacra<commit_after>""" rhcephcompose CLI """
from argparse import ArgumentParser
import kobo.conf
from rhcephcompose.compose import Compose
class RHCephCompose(object):
""" Main class for rhcephcompose CLI. """
def __init__(self):
parser = ArgumentParser(description='Generate a compose for RHCS.')
parser.add_argument('config_file', metavar='config',
help='main configuration file for this release.')
parser.add_argument('--insecure', action='store_const', const=True,
default=False, help='skip SSL verification')
args = parser.parse_args()
conf = kobo.conf.PyConfigParser()
conf.load_from_file(args.config_file)
if args.insecure:
conf['chacra_ssl_verify'] = False
import requests
from requests.packages.urllib3.exceptions\
import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
compose = Compose(conf)
compose.run()
|
ffce8ea9bda95945e335fef75ba93b1066c795ac | doc/quickstart/testlibs/LoginLibrary.py | doc/quickstart/testlibs/LoginLibrary.py | import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
| import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
 | Use subprocess instead of popen to get Jython working too | Use subprocess instead of popen to get Jython working too
--HG--
extra : convert_revision : svn%3A79c32731-664e-0410-8185-e51b9e89f9fb/trunk%403645
| Python | apache-2.0 | Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3,userzimmermann/robotframework-python3,userzimmermann/robotframework-python3,Senseg/robotframework,Senseg/robotframework | import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
Use subprocess instead of popen to get Jython working too
--HG--
extra : convert_revision : svn%3A79c32731-664e-0410-8185-e51b9e89f9fb/trunk%403645 | import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
| <commit_before>import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
<commit_msg>Use subprocess instead of popen to get Jython working too
--HG--
extra : convert_revision : svn%3A79c32731-664e-0410-8185-e51b9e89f9fb/trunk%403645<commit_after> | import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
| import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
Use subprocess instead of popen to get Jython working too
--HG--
extra : convert_revision : svn%3A79c32731-664e-0410-8185-e51b9e89f9fb/trunk%403645import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
| <commit_before>import os
import sys
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '"%s" %s %s' % (self._sut_path, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
<commit_msg>Use subprocess instead of popen to get Jython working too
--HG--
extra : convert_revision : svn%3A79c32731-664e-0410-8185-e51b9e89f9fb/trunk%403645<commit_after>import os
import sys
import subprocess
class LoginLibrary:
def __init__(self):
self._sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
if not sys.executable:
raise RuntimeError("Could not find Jython installation")
command = [sys.executable, self._sut_path, command] + list(args)
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self._status = process.communicate()[0].strip()
|
07d587cdf7883418a293fc3ff5a5f078c4da211f | astrobin_apps_donations/utils.py | astrobin_apps_donations/utils.py | from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
| from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
| Fix checking whether user is donor. | Fix checking whether user is donor.
| Python | agpl-3.0 | astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin | from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
Fix checking whether user is donor. | from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
| <commit_before>from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
<commit_msg>Fix checking whether user is donor.<commit_after> | from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
| from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
Fix checking whether user is donor.from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
| <commit_before>from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated:
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
<commit_msg>Fix checking whether user is donor.<commit_after>from subscription.models import UserSubscription
def user_is_donor(user):
if user.is_authenticated():
return UserSubscription.objects.filter(user = user, subscription__name = 'AstroBin Donor').count() > 0
return False
|
9d759bc8f7980ad4fa9707b2d6425ceac616460a | backend/post_handler/__init__.py | backend/post_handler/__init__.py | from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
print request.values
print request.form.get('sdp')
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
| from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
# print request.values
sdp_headers = request.form.get('sdp')
with open('./stream.sdp', 'w') as f:
f.write(sdp_headers)
cmd = "ffmpeg -i stream.sdp -vcodec libx264 -acodec aac -strict -2 -y ~/tmp/out.mp4 &"
import os
# os.spawnl(os.P_DETACH, cmd)
os.system(cmd)
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
| Add handling of incoming requests to post_handler | Add handling of incoming requests to post_handler
| Python | mit | optimus-team/optimus-video,optimus-team/optimus-video,optimus-team/optimus-video,optimus-team/optimus-video | from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
print request.values
print request.form.get('sdp')
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
Add handling of incoming requests to post_handler | from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
# print request.values
sdp_headers = request.form.get('sdp')
with open('./stream.sdp', 'w') as f:
f.write(sdp_headers)
cmd = "ffmpeg -i stream.sdp -vcodec libx264 -acodec aac -strict -2 -y ~/tmp/out.mp4 &"
import os
# os.spawnl(os.P_DETACH, cmd)
os.system(cmd)
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
| <commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
print request.values
print request.form.get('sdp')
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
<commit_msg>Add handling of incoming requests to post_handler<commit_after> | from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
# print request.values
sdp_headers = request.form.get('sdp')
with open('./stream.sdp', 'w') as f:
f.write(sdp_headers)
cmd = "ffmpeg -i stream.sdp -vcodec libx264 -acodec aac -strict -2 -y ~/tmp/out.mp4 &"
import os
# os.spawnl(os.P_DETACH, cmd)
os.system(cmd)
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
| from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
print request.values
print request.form.get('sdp')
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
Add handling of incoming requests to post_handlerfrom flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
# print request.values
sdp_headers = request.form.get('sdp')
with open('./stream.sdp', 'w') as f:
f.write(sdp_headers)
cmd = "ffmpeg -i stream.sdp -vcodec libx264 -acodec aac -strict -2 -y ~/tmp/out.mp4 &"
import os
# os.spawnl(os.P_DETACH, cmd)
os.system(cmd)
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
| <commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
print request.values
print request.form.get('sdp')
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
<commit_msg>Add handling of incoming requests to post_handler<commit_after>from flask import Flask
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def hello():
from flask import request
# print dir(request)
# print request.values
sdp_headers = request.form.get('sdp')
with open('./stream.sdp', 'w') as f:
f.write(sdp_headers)
cmd = "ffmpeg -i stream.sdp -vcodec libx264 -acodec aac -strict -2 -y ~/tmp/out.mp4 &"
import os
# os.spawnl(os.P_DETACH, cmd)
os.system(cmd)
return 'ok'
if __name__ == "__main__":
app.run('0.0.0.0')
|
e4c20eae4f847abe71ab661374abf14cdea3f99e | pyowm/constants.py | pyowm/constants.py | """
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.6.1'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
| """
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.7.0'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
| Prepare bump to version 2.7.0 | Prepare bump to version 2.7.0
| Python | mit | csparpa/pyowm,csparpa/pyowm | """
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.6.1'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
Prepare bump to version 2.7.0 | """
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.7.0'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
| <commit_before>"""
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.6.1'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
<commit_msg>Prepare bump to version 2.7.0<commit_after> | """
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.7.0'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
| """
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.6.1'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
Prepare bump to version 2.7.0"""
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.7.0'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
| <commit_before>"""
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.6.1'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
<commit_msg>Prepare bump to version 2.7.0<commit_after>"""
Constants for the PyOWM library
"""
PYOWM_VERSION = '2.7.0'
LATEST_OWM_API_VERSION = '2.5'
DEFAULT_API_KEY = 'b1b15e88fa797225412429c1c50c122a'
|
2ccb6b1d1beddede7e98eabeeef0219bff293638 | calvin/calvinsys/sensors/distance.py | calvin/calvinsys/sensors/distance.py | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
| # -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
self._has_data = False
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
| Add default value to _has_data | Add default value to _has_data
| Python | apache-2.0 | EricssonResearch/calvin-base,les69/calvin-base,EricssonResearch/calvin-base,les69/calvin-base,les69/calvin-base,les69/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
Add default value to _has_data | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
self._has_data = False
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
<commit_msg>Add default value to _has_data<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
self._has_data = False
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
| # -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
Add default value to _has_data# -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
self._has_data = False
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
<commit_msg>Add default value to _has_data<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.plugins.io.sensors.distance import distance
class Distance(object):
"""
Distance sensor
"""
def __init__(self, node, actor):
self._node = node
self._actor = actor
self._distance = distance.Distance(node, self._new_measurement)
self._has_data = False
def _new_measurement(self, measurement):
self._measurement = measurement
self._has_data = True
self._node.sched.trigger_loop(actor_ids=[self._actor])
def start(self, frequency):
self._distance.start(frequency)
def stop(self):
self._distance.stop()
def has_data(self):
return self._has_data
def read(self):
if self._has_data:
self._has_data = False
return self._measurement
def register(node=None, actor=None):
return Distance(node, actor)
|
552283714c329e3a304cd8a8bc14e5370fa6a879 | cosmo_tester/framework/constants.py | cosmo_tester/framework/constants.py | CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
| CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'5.2.2',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
| Add 5.2.2 to supported versions | Add 5.2.2 to supported versions
| Python | apache-2.0 | cloudify-cosmo/cloudify-system-tests,cloudify-cosmo/cloudify-system-tests | CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
Add 5.2.2 to supported versions | CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'5.2.2',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
| <commit_before>CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
<commit_msg>Add 5.2.2 to supported versions<commit_after> | CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'5.2.2',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
| CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
Add 5.2.2 to supported versionsCLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'5.2.2',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
| <commit_before>CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
<commit_msg>Add 5.2.2 to supported versions<commit_after>CLOUDIFY_TENANT_HEADER = 'Tenant'
SUPPORTED_RELEASES = [
'5.0.5',
'5.1.0',
'5.1.1',
'5.1.2',
'5.1.3',
'5.1.4',
'5.2.0',
'5.2.1',
'5.2.2',
'6.0.0',
'master',
]
SUPPORTED_FOR_RPM_UPGRADE = [
version + '-ga'
for version in SUPPORTED_RELEASES
if version not in ('master', '5.0.5', '5.1.0')
]
|
7848338fd8c1a73c8371617fc4b72a139380cc50 | blaze/expr/tests/test_strings.py | blaze/expr/tests/test_strings.py | import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
| import datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
| Test for new like expression | Test for new like expression
| Python | bsd-3-clause | ContinuumIO/blaze,cpcloud/blaze,ContinuumIO/blaze,cpcloud/blaze,cowlicks/blaze,cowlicks/blaze | import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
Test for new like expression | import datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
| <commit_before>import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
<commit_msg>Test for new like expression<commit_after> | import datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
| import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
Test for new like expressionimport datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
| <commit_before>import datashape
from blaze.expr import TableSymbol, like, Like
def test_like():
t = TableSymbol('t', '{name: string, amount: int, city: string}')
expr = like(t, name='Alice*')
assert eval(str(expr)).isidentical(expr)
assert expr.schema == t.schema
assert expr.dshape[0] == datashape.var
<commit_msg>Test for new like expression<commit_after>import datashape
import pytest
from datashape import dshape
from blaze import symbol
@pytest.mark.parametrize(
'ds',
[
'var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string',
]
)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
|
e3e7fa542650cb909bb761771b08648252e9a279 | get-county-data.py | get-county-data.py | #!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*fi0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
| #!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*[fh][ic]0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
| Allow half and combined plats to show up in list builder | Allow half and combined plats to show up in list builder
| Python | mit | simonsonc/mn-glo-mosaic,simonsonc/mn-glo-mosaic,simonsonc/mn-glo-mosaic | #!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*fi0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
Allow half and combined plats to show up in list builder | #!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*[fh][ic]0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
| <commit_before>#!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*fi0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
<commit_msg>Allow half and combined plats to show up in list builder<commit_after> | #!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*[fh][ic]0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
| #!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*fi0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
Allow half and combined plats to show up in list builder#!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*[fh][ic]0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
| <commit_before>#!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*fi0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
<commit_msg>Allow half and combined plats to show up in list builder<commit_after>#!/usr/bin/env python3
from ftplib import FTP
import re
excluded = [x.strip() for x in open('bad-zip-names.txt').readlines()]
counties = [x.strip() for x in open('counties.txt').readlines()]
conn = FTP('ftp.lmic.state.mn.us')
conn.login()
filter_regex = re.compile('.*[fh][ic]0.\.zip')
for county in counties:
print(county)
conn.cwd('/pub/data/basemaps/glo/{county}/Georeferenced/'.format(county=county))
zips = [x for x in conn.nlst() if x.endswith('.zip')]
fizips = [x for x in zips if filter_regex.match(x)]
final = [x for x in fizips if x not in excluded]
with open('counties/{county}.txt'.format(county=county), 'wt') as out:
for x in final:
out.write(x + '\n')
|
de31fba90a541f272868d5868b402af3d2902ecc | labonneboite/common/maps/constants.py | labonneboite/common/maps/constants.py | ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
| ENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
| Add option to enable/disable each travel_mode | Add option to enable/disable each travel_mode
| Python | agpl-3.0 | StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite | ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
Add option to enable/disable each travel_mode | ENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
| <commit_before>ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
<commit_msg>Add option to enable/disable each travel_mode<commit_after> | ENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
| ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
Add option to enable/disable each travel_modeENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
| <commit_before>ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
DEFAULT_TRAVEL_MODE = CAR_MODE
TRAVEL_MODES = (
PUBLIC_MODE,
CAR_MODE,
)
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
<commit_msg>Add option to enable/disable each travel_mode<commit_after>ENABLE_CAR_MODE = True
ENABLE_PUBLIC_MODE = True
ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45)
CAR_MODE = 'car'
PUBLIC_MODE = 'public'
TRAVEL_MODES = ()
if ENABLE_PUBLIC_MODE:
TRAVEL_MODES += (PUBLIC_MODE,)
if ENABLE_CAR_MODE:
TRAVEL_MODES += (CAR_MODE,)
if ENABLE_CAR_MODE:
DEFAULT_TRAVEL_MODE = CAR_MODE
else:
DEFAULT_TRAVEL_MODE = PUBLIC_MODE
TRAVEL_MODES_FRENCH = {
CAR_MODE: 'Voiture',
PUBLIC_MODE: 'Transports en commun',
}
|
597451a5c33fb9f18f599627fb4a1e72daf08b90 | django/__init__.py | django/__init__.py | VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
| VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
| Update django.VERSION in trunk per previous discussion | Update django.VERSION in trunk per previous discussion
git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37
| Python | bsd-3-clause | svn2github/django,svn2github/django,svn2github/django | VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
Update django.VERSION in trunk per previous discussion
git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37 | VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
| <commit_before>VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
<commit_msg>Update django.VERSION in trunk per previous discussion
git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after> | VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
| VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
Update django.VERSION in trunk per previous discussion
git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
| <commit_before>VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
<commit_msg>Update django.VERSION in trunk per previous discussion
git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37<commit_after>VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
7ca12bb0d2b687c41f9e3b304cc2d7be37ca7a8d | tests/_test_mau_a_vs_an.py | tests/_test_mau_a_vs_an.py | """Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.check("""An apple a day keeps the doctor away.""")
assert self.check("""The Epicurean garden.""")
assert not self.check("""A apple a day keeps the doctor away.""")
assert not self.check("""An apple an day keeps the doctor away.""")
assert not self.check("""An apple an\nday keeps the doctor away.""")
| """Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.passes("""An apple a day keeps the doctor away.""")
assert self.passes("""The Epicurean garden.""")
assert not self.passes("""A apple a day keeps the doctor away.""")
assert not self.passes("""An apple an day keeps the doctor away.""")
assert not self.passes("""An apple an\nday keeps the doctor away.""")
| Change 'check' to 'passes' in a vs. an check | Change 'check' to 'passes' in a vs. an check
| Python | bsd-3-clause | amperser/proselint,jstewmon/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint | """Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.check("""An apple a day keeps the doctor away.""")
assert self.check("""The Epicurean garden.""")
assert not self.check("""A apple a day keeps the doctor away.""")
assert not self.check("""An apple an day keeps the doctor away.""")
assert not self.check("""An apple an\nday keeps the doctor away.""")
Change 'check' to 'passes' in a vs. an check | """Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.passes("""An apple a day keeps the doctor away.""")
assert self.passes("""The Epicurean garden.""")
assert not self.passes("""A apple a day keeps the doctor away.""")
assert not self.passes("""An apple an day keeps the doctor away.""")
assert not self.passes("""An apple an\nday keeps the doctor away.""")
| <commit_before>"""Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.check("""An apple a day keeps the doctor away.""")
assert self.check("""The Epicurean garden.""")
assert not self.check("""A apple a day keeps the doctor away.""")
assert not self.check("""An apple an day keeps the doctor away.""")
assert not self.check("""An apple an\nday keeps the doctor away.""")
<commit_msg>Change 'check' to 'passes' in a vs. an check<commit_after> | """Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.passes("""An apple a day keeps the doctor away.""")
assert self.passes("""The Epicurean garden.""")
assert not self.passes("""A apple a day keeps the doctor away.""")
assert not self.passes("""An apple an day keeps the doctor away.""")
assert not self.passes("""An apple an\nday keeps the doctor away.""")
| """Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.check("""An apple a day keeps the doctor away.""")
assert self.check("""The Epicurean garden.""")
assert not self.check("""A apple a day keeps the doctor away.""")
assert not self.check("""An apple an day keeps the doctor away.""")
assert not self.check("""An apple an\nday keeps the doctor away.""")
Change 'check' to 'passes' in a vs. an check"""Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.passes("""An apple a day keeps the doctor away.""")
assert self.passes("""The Epicurean garden.""")
assert not self.passes("""A apple a day keeps the doctor away.""")
assert not self.passes("""An apple an day keeps the doctor away.""")
assert not self.passes("""An apple an\nday keeps the doctor away.""")
| <commit_before>"""Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.check("""An apple a day keeps the doctor away.""")
assert self.check("""The Epicurean garden.""")
assert not self.check("""A apple a day keeps the doctor away.""")
assert not self.check("""An apple an day keeps the doctor away.""")
assert not self.check("""An apple an\nday keeps the doctor away.""")
<commit_msg>Change 'check' to 'passes' in a vs. an check<commit_after>"""Unit tests for MAU101."""
from check import Check
from proselint.checks.garner import a_vs_an as chk
class TestCheck(Check):
"""Test garner.a_vs_n."""
__test__ = True
@property
def this_check(self):
"""Boilerplate."""
return chk
def test(self):
"""Ensure the test works correctly."""
assert self.passes("""An apple a day keeps the doctor away.""")
assert self.passes("""The Epicurean garden.""")
assert not self.passes("""A apple a day keeps the doctor away.""")
assert not self.passes("""An apple an day keeps the doctor away.""")
assert not self.passes("""An apple an\nday keeps the doctor away.""")
|
5ccfa503950156db79f3d63816168a4040f80b7b | testing/settings.py | testing/settings.py | # -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
| # -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
CELERY_TASK_SERIALIZER = 'json'
| Set task serializer to json | Set task serializer to json
| Python | bsd-3-clause | CloudNcodeInc/djmail,CloudNcodeInc/djmail,CloudNcodeInc/djmail | # -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
Set task serializer to json | # -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
CELERY_TASK_SERIALIZER = 'json'
| <commit_before># -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
<commit_msg>Set task serializer to json<commit_after> | # -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
CELERY_TASK_SERIALIZER = 'json'
| # -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
Set task serializer to json# -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
CELERY_TASK_SERIALIZER = 'json'
| <commit_before># -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
<commit_msg>Set task serializer to json<commit_after># -*- encoding: utf-8 -*-
import os, sys
sys.path.insert(0, '..')
PROJECT_ROOT = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test'
}
}
MIDDLEWARE_CLASSES = ()
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'
INSTALLED_APPS = (
'djmail',
'djcelery',
'testing',
)
import djcelery
djcelery.setup_loader()
CELERY_ALWAYS_EAGER = True
CELERY_TASK_SERIALIZER = 'json'
|
482c215fc28785c53d252df95709fdd51c1c6679 | tests/frontend/conftest.py | tests/frontend/conftest.py | import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def frontend_app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(frontend_app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(frontend_app, SkyLines)
with frontend_app.app_context():
clean_db()
bootstrap()
yield frontend_app
model.db.session.rollback()
| import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(app, SkyLines)
with app.app_context():
clean_db()
bootstrap()
yield app
model.db.session.rollback()
| Rename frontend_app fixture to app | tests/frontend: Rename frontend_app fixture to app
| Python | agpl-3.0 | snip/skylines,Turbo87/skylines,shadowoneau/skylines,shadowoneau/skylines,Harry-R/skylines,Harry-R/skylines,RBE-Avionik/skylines,skylines-project/skylines,shadowoneau/skylines,RBE-Avionik/skylines,Turbo87/skylines,kerel-fs/skylines,snip/skylines,TobiasLohner/SkyLines,skylines-project/skylines,TobiasLohner/SkyLines,RBE-Avionik/skylines,RBE-Avionik/skylines,Turbo87/skylines,Harry-R/skylines,shadowoneau/skylines,skylines-project/skylines,Turbo87/skylines,kerel-fs/skylines,kerel-fs/skylines,snip/skylines,skylines-project/skylines,TobiasLohner/SkyLines,Harry-R/skylines | import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def frontend_app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(frontend_app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(frontend_app, SkyLines)
with frontend_app.app_context():
clean_db()
bootstrap()
yield frontend_app
model.db.session.rollback()
tests/frontend: Rename frontend_app fixture to app | import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(app, SkyLines)
with app.app_context():
clean_db()
bootstrap()
yield app
model.db.session.rollback()
| <commit_before>import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def frontend_app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(frontend_app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(frontend_app, SkyLines)
with frontend_app.app_context():
clean_db()
bootstrap()
yield frontend_app
model.db.session.rollback()
<commit_msg>tests/frontend: Rename frontend_app fixture to app<commit_after> | import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(app, SkyLines)
with app.app_context():
clean_db()
bootstrap()
yield app
model.db.session.rollback()
| import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def frontend_app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(frontend_app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(frontend_app, SkyLines)
with frontend_app.app_context():
clean_db()
bootstrap()
yield frontend_app
model.db.session.rollback()
tests/frontend: Rename frontend_app fixture to appimport pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(app, SkyLines)
with app.app_context():
clean_db()
bootstrap()
yield app
model.db.session.rollback()
| <commit_before>import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def frontend_app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(frontend_app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(frontend_app, SkyLines)
with frontend_app.app_context():
clean_db()
bootstrap()
yield frontend_app
model.db.session.rollback()
<commit_msg>tests/frontend: Rename frontend_app fixture to app<commit_after>import pytest
import config
from skylines import model, create_frontend_app
from skylines.app import SkyLines
from tests import setup_app, setup_db, teardown_db, clean_db
from tests.data.bootstrap import bootstrap
@pytest.yield_fixture(scope="session")
def app():
"""Set up global front-end app for functional tests
Initialized once per test-run
"""
app = create_frontend_app(config.TESTING_CONF_PATH)
with app.app_context():
setup_app(app)
setup_db()
yield app
teardown_db()
@pytest.yield_fixture(scope="function")
def frontend(app):
"""Clean database before each frontend test
This fixture uses frontend_app, suitable for functional tests.
"""
assert isinstance(app, SkyLines)
with app.app_context():
clean_db()
bootstrap()
yield app
model.db.session.rollback()
|
18cd04d24965d173a98ebb4e7425344a1992bcce | tests/test_ecdsa.py | tests/test_ecdsa.py | import pytest
import unittest
from graphenebase.ecdsa import (
sign_message,
verify_message
)
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
signature = sign_message("Foobar", wif)
self.assertTrue(verify_message("Foobar", signature))
if __name__ == '__main__':
unittest.main()
| import pytest
import unittest
from binascii import hexlify, unhexlify
import graphenebase.ecdsa as ecdsa
from graphenebase.account import PrivateKey, PublicKey, Address
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_cryptography(self):
if not ecdsa.CRYPTOGRAPHY_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "cryptography"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_secp256k1(self):
if not ecdsa.SECP256K1_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "secp256k1"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
if __name__ == '__main__':
unittest.main()
| Add unit test for cryptography and secp256k1 | Add unit test for cryptography and secp256k1
| Python | mit | xeroc/python-graphenelib | import pytest
import unittest
from graphenebase.ecdsa import (
sign_message,
verify_message
)
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
signature = sign_message("Foobar", wif)
self.assertTrue(verify_message("Foobar", signature))
if __name__ == '__main__':
unittest.main()
Add unit test for cryptography and secp256k1 | import pytest
import unittest
from binascii import hexlify, unhexlify
import graphenebase.ecdsa as ecdsa
from graphenebase.account import PrivateKey, PublicKey, Address
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_cryptography(self):
if not ecdsa.CRYPTOGRAPHY_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "cryptography"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_secp256k1(self):
if not ecdsa.SECP256K1_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "secp256k1"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
if __name__ == '__main__':
unittest.main()
| <commit_before>import pytest
import unittest
from graphenebase.ecdsa import (
sign_message,
verify_message
)
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
signature = sign_message("Foobar", wif)
self.assertTrue(verify_message("Foobar", signature))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add unit test for cryptography and secp256k1<commit_after> | import pytest
import unittest
from binascii import hexlify, unhexlify
import graphenebase.ecdsa as ecdsa
from graphenebase.account import PrivateKey, PublicKey, Address
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_cryptography(self):
if not ecdsa.CRYPTOGRAPHY_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "cryptography"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_secp256k1(self):
if not ecdsa.SECP256K1_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "secp256k1"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
if __name__ == '__main__':
unittest.main()
| import pytest
import unittest
from graphenebase.ecdsa import (
sign_message,
verify_message
)
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
signature = sign_message("Foobar", wif)
self.assertTrue(verify_message("Foobar", signature))
if __name__ == '__main__':
unittest.main()
Add unit test for cryptography and secp256k1import pytest
import unittest
from binascii import hexlify, unhexlify
import graphenebase.ecdsa as ecdsa
from graphenebase.account import PrivateKey, PublicKey, Address
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_cryptography(self):
if not ecdsa.CRYPTOGRAPHY_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "cryptography"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_secp256k1(self):
if not ecdsa.SECP256K1_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "secp256k1"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
if __name__ == '__main__':
unittest.main()
| <commit_before>import pytest
import unittest
from graphenebase.ecdsa import (
sign_message,
verify_message
)
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
signature = sign_message("Foobar", wif)
self.assertTrue(verify_message("Foobar", signature))
if __name__ == '__main__':
unittest.main()
<commit_msg>Add unit test for cryptography and secp256k1<commit_after>import pytest
import unittest
from binascii import hexlify, unhexlify
import graphenebase.ecdsa as ecdsa
from graphenebase.account import PrivateKey, PublicKey, Address
wif = "5J4KCbg1G3my9b9hCaQXnHSm6vrwW9xQTJS6ZciW2Kek7cCkCEk"
class Testcases(unittest.TestCase):
# Ignore warning:
# https://www.reddit.com/r/joinmarket/comments/5crhfh/userwarning_implicit_cast_from_char_to_a/
@pytest.mark.filterwarnings()
def test_sign_message(self):
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_cryptography(self):
if not ecdsa.CRYPTOGRAPHY_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "cryptography"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
def test_sign_message_secp256k1(self):
if not ecdsa.SECP256K1_AVAILABLE:
return
ecdsa.SECP256K1_MODULE = "secp256k1"
pub_key = bytes(repr(PrivateKey(wif).pubkey), "latin")
signature = ecdsa.sign_message("Foobar", wif)
pub_key_sig = ecdsa.verify_message("Foobar", signature)
self.assertEqual(hexlify(pub_key_sig), pub_key)
if __name__ == '__main__':
unittest.main()
|
832fecfe5bfc8951c0d302c2f913a81acfbc657c | solarnmf_main_ts.py | solarnmf_main_ts.py | #solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| #solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| Fix for input options in make_t_matrix function | Fix for input options in make_t_matrix function
| Python | mit | wtbarnes/solarnmf | #solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
Fix for input options in make_t_matrix function | #solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| <commit_before>#solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
<commit_msg>Fix for input options in make_t_matrix function<commit_after> | #solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| #solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
Fix for input options in make_t_matrix function#solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| <commit_before>#solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
<commit_msg>Fix for input options in make_t_matrix function<commit_after>#solarnmf_main_ts.py
#Will Barnes
#31 March 2015
#Import needed modules
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
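The commit above threads the grid options (nx, ny, p) through to make_t_matrix explicitly and tightens the initialize_uva and minimize_div parameters. A minimal sketch of that option-handling pattern follows; the signature and validation are hypothetical, since solarnmf_functions itself is not part of this record:
def make_t_matrix(source, format="matrix", nx=100, ny=100, p=10, filename=None):
    # Hypothetical sketch: reject unsupported formats up front rather than
    # silently falling back to defaults when options are missing.
    if format not in ("matrix", "timeseries"):
        raise ValueError("unsupported format: %r" % format)
    if format == "timeseries" and filename is None:
        raise ValueError("timeseries input requires a filename")
    return {"format": format, "nx": nx, "ny": ny, "p": p, "filename": filename}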
|
311dfdc28bda253e20d09c84a3ba739f5e9be7ef | tests/utils_test.py | tests/utils_test.py | import datetime
import json
import unittest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
class DatetimeJSONEncoderTest(unittest.TestCase):
def test_datetime_encoder_format(self):
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
self.assertEqual(expected_json_string, json_string)
def test_datetime_encoder_typeerror(self):
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with self.assertRaises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
| import datetime
import json
import pytest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
def test_datetime_encoder_format():
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
assert json_string == expected_json_string
def test_datetime_encoder_typeerror():
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with pytest.raises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
| Convert parser tests to pytest | Convert parser tests to pytest
| Python | mit | samueldg/clippings | import datetime
import json
import unittest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
class DatetimeJSONEncoderTest(unittest.TestCase):
def test_datetime_encoder_format(self):
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
self.assertEqual(expected_json_string, json_string)
def test_datetime_encoder_typeerror(self):
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with self.assertRaises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
Convert parser tests to pytest | import datetime
import json
import pytest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
def test_datetime_encoder_format():
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
assert json_string == expected_json_string
def test_datetime_encoder_typeerror():
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with pytest.raises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
| <commit_before>import datetime
import json
import unittest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
class DatetimeJSONEncoderTest(unittest.TestCase):
def test_datetime_encoder_format(self):
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
self.assertEqual(expected_json_string, json_string)
def test_datetime_encoder_typeerror(self):
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with self.assertRaises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
<commit_msg>Convert parser tests to pytest<commit_after> | import datetime
import json
import pytest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
def test_datetime_encoder_format():
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
assert json_string == expected_json_string
def test_datetime_encoder_typeerror():
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with pytest.raises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
| import datetime
import json
import unittest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
class DatetimeJSONEncoderTest(unittest.TestCase):
def test_datetime_encoder_format(self):
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
self.assertEqual(expected_json_string, json_string)
def test_datetime_encoder_typeerror(self):
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with self.assertRaises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
Convert parser tests to pytestimport datetime
import json
import pytest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
def test_datetime_encoder_format():
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
assert json_string == expected_json_string
def test_datetime_encoder_typeerror():
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with pytest.raises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
| <commit_before>import datetime
import json
import unittest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
class DatetimeJSONEncoderTest(unittest.TestCase):
def test_datetime_encoder_format(self):
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
self.assertEqual(expected_json_string, json_string)
def test_datetime_encoder_typeerror(self):
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with self.assertRaises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
<commit_msg>Convert parser tests to pytest<commit_after>import datetime
import json
import pytest
from clippings.utils import DatetimeJSONEncoder
DATE = datetime.datetime(2016, 1, 2, 3, 4, 5)
DATE_STRING = "2016-01-02T03:04:05"
def test_datetime_encoder_format():
dictionary = {"now": DATE}
expected_json_string = json.dumps({"now": DATE_STRING})
json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder)
assert json_string == expected_json_string
def test_datetime_encoder_typeerror():
undumpable_dictionary = {"set": set()}
# Ensure we let the parent raise TypeError
with pytest.raises(TypeError):
json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
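The conversion above follows the standard unittest-to-pytest recipe: drop the TestCase class, turn self.assertEqual into a plain assert, and swap self.assertRaises for the pytest.raises context manager. The same recipe on a self-contained toy example (not from the clippings repo):
import json
import pytest
def test_round_trip():
    # A bare assert replaces self.assertEqual; pytest rewrites it to report values.
    payload = {"answer": 42}
    assert json.loads(json.dumps(payload)) == payload
def test_unserializable():
    # pytest.raises replaces unittest's self.assertRaises context manager.
    with pytest.raises(TypeError):
        json.dumps(object())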
|
2f9c912c9071a498feb8d9cca69e447ffec397be | polygamy/pygit2_git.py | polygamy/pygit2_git.py | from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
| from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
| Implement set_remote_url in pygit2 implementation | Implement set_remote_url in pygit2 implementation
| Python | bsd-3-clause | solarnz/polygamy,solarnz/polygamy | from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
Implement set_remote_url in pygit2 implementation | from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
| <commit_before>from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
<commit_msg>Implement set_remote_url in pygit2 implementation<commit_after> | from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
| from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
Implement set_remote_url in pygit2 implementationfrom __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
| <commit_before>from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
<commit_msg>Implement set_remote_url in pygit2 implementation<commit_after>from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
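The refactor above deduplicates the linear remote lookup into _find_remote and reuses it for the new set_remote_url. The remote.url assignment plus remote.save() matches the pygit2 API of that era; on current pygit2 the per-remote save() is gone, and an equivalent sketch would go through the remote collection instead:
import pygit2
def set_remote_url(path, remote_name, remote_url):
    # Sketch for modern pygit2 (0.28+): URLs are updated via the
    # RemoteCollection rather than by mutating and saving a Remote object.
    repo = pygit2.Repository(path)
    repo.remotes.set_url(remote_name, remote_url)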
|
cc51f18f0c123ed9ef68b35264f0e1f53ae22588 | index_addresses.py | index_addresses.py | import csv
import re
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': ELASTICSEARCH_URL})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
| import csv
import re
import os
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': os.environ['ELASTICSEARCH_URL']})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
| Add correct syntax for environment variable | Add correct syntax for environment variable
| Python | mit | codeforamerica/streetscope,codeforamerica/streetscope | import csv
import re
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': ELASTICSEARCH_URL})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
Add correct syntax for environment variable | import csv
import re
import os
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': os.environ['ELASTICSEARCH_URL']})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
| <commit_before>import csv
import re
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': ELASTICSEARCH_URL})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
<commit_msg>Add correct syntax for environment variable<commit_after> | import csv
import re
import os
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': os.environ['ELASTICSEARCH_URL']})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
| import csv
import re
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': ELASTICSEARCH_URL})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
Add correct syntax for environment variableimport csv
import re
import os
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': os.environ['ELASTICSEARCH_URL']})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
| <commit_before>import csv
import re
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': ELASTICSEARCH_URL})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
<commit_msg>Add correct syntax for environment variable<commit_after>import csv
import re
import os
from elasticsearch import Elasticsearch
es = Elasticsearch({'host': os.environ['ELASTICSEARCH_URL']})
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
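The fix above reads the host from the process environment, and os.environ[...] deliberately raises KeyError when ELASTICSEARCH_URL is unset. Where a fallback is acceptable, a variant can use .get(); the default below is illustrative only:
import os
# .get() avoids the KeyError and falls back to a local default, which can be
# friendlier for development runs; the script above fails fast instead.
es_host = os.environ.get("ELASTICSEARCH_URL", "localhost")
print("Elasticsearch host: %s" % es_host)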
|
57318652ba9aacc0456334a1d6466734f35ab84d | e2etest/e2etest.py | e2etest/e2etest.py | #!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
| #!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift, e2etest_cormap
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
test_suite.addTest(e2etest_cormap.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
| Add cormapy test suite to e2e test suite | Add cormapy test suite to e2e test suite
| Python | mit | kif/freesas,kif/freesas,kif/freesas | #!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
Add cormapy test suite to e2e test suite | #!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift, e2etest_cormap
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
test_suite.addTest(e2etest_cormap.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
| <commit_before>#!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
<commit_msg>Add cormapy test suite to e2e test suite<commit_after> | #!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift, e2etest_cormap
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
test_suite.addTest(e2etest_cormap.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
| #!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
Add cormapy test suite to e2e test suite#!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift, e2etest_cormap
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
test_suite.addTest(e2etest_cormap.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
| <commit_before>#!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
<commit_msg>Add cormapy test suite to e2e test suite<commit_after>#!/usr/bin/env python
# coding: utf-8
"""Run the end to end tests of the project."""
__author__ = "Martha Brennich"
__license__ = "MIT"
__copyright__ = "2020"
__date__ = "11/07/2020"
import sys
import unittest
import e2etest_freesas, e2etest_guinier_apps, e2etest_bift, e2etest_cormap
def suite():
"""Creates suite for e2e tests"""
test_suite = unittest.TestSuite()
test_suite.addTest(e2etest_freesas.suite())
test_suite.addTest(e2etest_guinier_apps.suite())
test_suite.addTest(e2etest_bift.suite())
test_suite.addTest(e2etest_cormap.suite())
return test_suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
result = runner.run(suite())
if result.wasSuccessful():
EXIT_STATUS = 0
else:
EXIT_STATUS = 1
sys.exit(EXIT_STATUS)
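The change above registers one more sub-suite on the aggregate unittest.TestSuite. The composition pattern in isolation, with placeholder modules standing in for the e2etest_* imports:
import unittest
def build_suite(*test_modules):
    # Each module is assumed to expose a suite() factory, mirroring the
    # e2etest_* convention; the argument list here is a placeholder.
    combined = unittest.TestSuite()
    for module in test_modules:
        combined.addTest(module.suite())
    return combined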
|
d0703be1d6adf6466f8c2120334a703210697176 | GCodeWriter.py | GCodeWriter.py | from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
gcode = getattr(mesh_data, 'gcode', False)
if gcode:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
f.write(gcode)
storage_device.closeFile(f)
return True
return False
| from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
from UM.Application import Application
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
scene = Application.getInstance().getController().getScene()
gcode_list = getattr(scene, 'gcode_list')
if gcode_list:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
for gcode in gcode_list:
f.write(gcode)
storage_device.closeFile(f)
return True
return False
| Use the new CuraEngine GCode protocol instead of temp files. | Use the new CuraEngine GCode protocol instead of temp files.
| Python | agpl-3.0 | ynotstartups/Wanhao,lo0ol/Ultimaker-Cura,senttech/Cura,ad1217/Cura,lo0ol/Ultimaker-Cura,Curahelper/Cura,quillford/Cura,derekhe/Cura,totalretribution/Cura,quillford/Cura,ynotstartups/Wanhao,fieldOfView/Cura,fieldOfView/Cura,ad1217/Cura,markwal/Cura,bq/Ultimaker-Cura,fxtentacle/Cura,fxtentacle/Cura,hmflash/Cura,derekhe/Cura,hmflash/Cura,senttech/Cura,markwal/Cura,totalretribution/Cura,DeskboxBrazil/Cura,DeskboxBrazil/Cura,bq/Ultimaker-Cura,Curahelper/Cura | from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
gcode = getattr(mesh_data, 'gcode', False)
if gcode:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
f.write(gcode)
storage_device.closeFile(f)
return True
return False
Use the new CuraEngine GCode protocol instead of temp files. | from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
from UM.Application import Application
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
scene = Application.getInstance().getController().getScene()
gcode_list = getattr(scene, 'gcode_list')
if gcode_list:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
for gcode in gcode_list:
f.write(gcode)
storage_device.closeFile(f)
return True
return False
| <commit_before>from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
gcode = getattr(mesh_data, 'gcode', False)
if gcode:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
f.write(gcode)
storage_device.closeFile(f)
return True
return False
<commit_msg>Use the new CuraEngine GCode protocol instead of temp files.<commit_after> | from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
from UM.Application import Application
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
scene = Application.getInstance().getController().getScene()
gcode_list = getattr(scene, 'gcode_list')
if gcode_list:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
for gcode in gcode_list:
f.write(gcode)
storage_device.closeFile(f)
return True
return False
| from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
gcode = getattr(mesh_data, 'gcode', False)
if gcode:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
f.write(gcode)
storage_device.closeFile(f)
return True
return False
Use the new CuraEngine GCode protocol instead of temp files.from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
from UM.Application import Application
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
scene = Application.getInstance().getController().getScene()
gcode_list = getattr(scene, 'gcode_list')
if gcode_list:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
for gcode in gcode_list:
f.write(gcode)
storage_device.closeFile(f)
return True
return False
| <commit_before>from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
gcode = getattr(mesh_data, 'gcode', False)
if gcode:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
f.write(gcode)
storage_device.closeFile(f)
return True
return False
<commit_msg>Use the new CuraEngine GCode protocol instead of temp files.<commit_after>from UM.Mesh.MeshWriter import MeshWriter
from UM.Logger import Logger
from UM.Application import Application
import io
class GCodeWriter(MeshWriter):
def __init__(self):
super().__init__()
self._gcode = None
def write(self, file_name, storage_device, mesh_data):
if 'gcode' in file_name:
scene = Application.getInstance().getController().getScene()
gcode_list = getattr(scene, 'gcode_list')
if gcode_list:
f = storage_device.openFile(file_name, 'wt')
Logger.log('d', "Writing GCode to file %s", file_name)
for gcode in gcode_list:
f.write(gcode)
storage_device.closeFile(f)
return True
return False
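The rewrite above fetches a list of g-code chunks from the scene and streams them out one at a time instead of writing a single blob. A minimal sketch of that chunked-write pattern, independent of the Uranium storage-device API:
import io
def write_chunks(stream, chunks):
    # Write each chunk in order; a missing or empty list writes nothing,
    # mirroring the getattr(scene, 'gcode_list') guard in the commit.
    for chunk in chunks or []:
        stream.write(chunk)
buf = io.StringIO()
write_chunks(buf, ["G28\n", "G1 X10 Y10 F3000\n"])
assert buf.getvalue().startswith("G28")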
|
6ee261309f4492994b52403d485bdfd08739a072 | kolibri/utils/tests/test_handler.py | kolibri/utils/tests/test_handler.py | import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
| import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
| Fix argument ordering in log handler test. | Fix argument ordering in log handler test.
| Python | mit | indirectlylit/kolibri,indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri,learningequality/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri | import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
Fix argument ordering in log handler test. | import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
| <commit_before>import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
<commit_msg>Fix argument ordering in log handler test.<commit_after> | import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
| import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
Fix argument ordering in log handler test.import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
| <commit_before>import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["--skipupdate", "manage", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
<commit_msg>Fix argument ordering in log handler test.<commit_after>import os
from time import sleep
from django.conf import settings
from django.test import TestCase
from kolibri.utils import cli
class KolibriTimedRotatingFileHandlerTestCase(TestCase):
def test_do_rollover(self):
archive_dir = os.path.join(os.environ["KOLIBRI_HOME"], "logs", "archive")
orig_value = settings.LOGGING["handlers"]["file"]["when"]
# Temporarily set the rotation time of the log file to be every second
settings.LOGGING["handlers"]["file"]["when"] = "s"
# make sure that kolibri will be running for more than one second
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
sleep(1)
try:
cli.main(["manage", "--skipupdate", "help"])
except SystemExit:
pass
# change back to the original rotation time
settings.LOGGING["handlers"]["file"]["when"] = orig_value
self.assertNotEqual(os.listdir(archive_dir), [])
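The fix above moves --skipupdate after the manage subcommand: a flag that belongs to a subcommand must follow it on the command line. The same ordering rule demonstrated with plain argparse (Kolibri's actual CLI plumbing differs, so this is illustrative only):
import argparse
parser = argparse.ArgumentParser(prog="kolibri")
subcommands = parser.add_subparsers(dest="command")
manage = subcommands.add_parser("manage")
manage.add_argument("--skipupdate", action="store_true")
manage.add_argument("django_args", nargs="*")
# Works because the flag follows its subcommand; reversing the order would
# fail, since the top-level parser defines no --skipupdate option.
args = parser.parse_args(["manage", "--skipupdate", "help"])
assert args.skipupdate and args.django_args == ["help"]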
|
6a06ae04309b3d881b7001836b5c9cec86a59eae | api/main.py | api/main.py | from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
| from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view, JSONPlugin, json_dumps as dumps
from functools import partial
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
better_dumps = partial(dumps, separators=(',', ':'))
bottle.default_app().uninstall(JSONPlugin)
bottle.default_app().install(JSONPlugin(better_dumps))
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
| Make output even more minimal. | Make output even more minimal.
| Python | mit | EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger | from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
Make output even more minimal. | from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view, JSONPlugin, json_dumps as dumps
from functools import partial
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
better_dumps = partial(dumps, separators=(',', ':'))
bottle.default_app().uninstall(JSONPlugin)
bottle.default_app().install(JSONPlugin(better_dumps))
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
| <commit_before>from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
<commit_msg>Make output even more minimal.<commit_after> | from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view, JSONPlugin, json_dumps as dumps
from functools import partial
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
better_dumps = partial(dumps, separators=(',', ':'))
bottle.default_app().uninstall(JSONPlugin)
bottle.default_app().install(JSONPlugin(better_dumps))
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
| from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
Make output even more minimal.from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view, JSONPlugin, json_dumps as dumps
from functools import partial
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
better_dumps = partial(dumps, separators=(',', ':'))
bottle.default_app().uninstall(JSONPlugin)
bottle.default_app().install(JSONPlugin(better_dumps))
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
| <commit_before>from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
<commit_msg>Make output even more minimal.<commit_after>from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view, JSONPlugin, json_dumps as dumps
from functools import partial
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
better_dumps = partial(dumps, separators=(',', ':'))
bottle.default_app().uninstall(JSONPlugin)
bottle.default_app().install(JSONPlugin(better_dumps))
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
|
db47b7622595356ef75b18ef09ac8a5c2a55581e | foo.py | foo.py | """foo.py – a simple demo of importing a class from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
| """foo.py - a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
| Change extended ASCII character in docstring | Change extended ASCII character in docstring
Fix a – and replace it with a - | Python | mit | Auctoris/ctypes_demo,Auctoris/ctypes_demo | """foo.py – a simple demo of importing a class from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
Change extended ASCII character in docstring
Fix a – and replace it with a - | """foo.py - a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
| <commit_before>"""foo.py – a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
<commit_msg>Change extended ASCII character in docstring
Fix a – and replace it with a -<commit_after> | """foo.py - a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
| """foo.py – a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
Change extended ASCII character in docstring
Fix a – and replace it with a -"""foo.py - a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
| <commit_before>"""foo.py – a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
<commit_msg>Change extended ASCII character in docstring
Fix a – and replace it with a -<commit_after>"""foo.py - a simple demo of importing a calss from C++"""
import ctypes
lib = ctypes.cdll.LoadLibrary('./libfoo.so')
class Foo(object):
"""The Foo class supports two methods, bar, and foobar..."""
def __init__(self, val):
lib.Foo_new.argtypes = [ctypes.c_int]
lib.Foo_new.restype = ctypes.c_void_p
lib.Foo_bar.argtypes = [ctypes.c_void_p]
lib.Foo_bar.restype = ctypes.c_char_p
lib.Foo_foobar.argtypes = [ctypes.c_void_p, ctypes.c_int]
lib.Foo_foobar.restype = ctypes.c_int
self.obj = lib.Foo_new(val)
def bar(self):
"""bar returns a string continaing the value"""
return (lib.Foo_bar(self.obj)).decode()
def foobar(self, val):
"""foobar takes an integer, and adds it to the value in the Foo class
- returning the result"""
return lib.Foo_foobar(self.obj, val)
|
dc4511324bcd518dfceb828eacd72b64a5442468 | tests/test_wolfram_alpha.py | tests/test_wolfram_alpha.py | # -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "what is the airspeed of an unladen swallow?"
target = ("#channel", u"estimated average cruising airspeed of an unladen European swallow = 11 m/s (meters per second) | (asked, but not answered, about a general swallow in the 1975 film Monty Python and the Holy Grail)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
| # -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "answer to the life universe and everything"
target = ("#channel", u"Answer to the Ultimate Question of Life, the Universe, and Everything = 42 | (according to Douglas Adams' humorous science-fiction novel The Hitchhiker's Guide to the Galaxy)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
| Change complex test to one that doesn't have a localizable response | Change complex test to one that doesn't have a localizable response
| Python | bsd-3-clause | lepinkainen/pyfibot,rnyberg/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,aapa/pyfibot,aapa/pyfibot,huqa/pyfibot,huqa/pyfibot,rnyberg/pyfibot,EArmour/pyfibot | # -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "what is the airspeed of an unladen swallow?"
target = ("#channel", u"estimated average cruising airspeed of an unladen European swallow = 11 m/s (meters per second) | (asked, but not answered, about a general swallow in the 1975 film Monty Python and the Holy Grail)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
Change complex test to one that doesn't have a localizable response | # -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "answer to the life universe and everything"
target = ("#channel", u"Answer to the Ultimate Question of Life, the Universe, and Everything = 42 | (according to Douglas Adams' humorous science-fiction novel The Hitchhiker's Guide to the Galaxy)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
| <commit_before># -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "what is the airspeed of an unladen swallow?"
target = ("#channel", u"estimated average cruising airspeed of an unladen European swallow = 11 m/s (meters per second) | (asked, but not answered, about a general swallow in the 1975 film Monty Python and the Holy Grail)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
<commit_msg>Change complex test to one that doesn't have a localizable response<commit_after> | # -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "answer to the life universe and everything"
target = ("#channel", u"Answer to the Ultimate Question of Life, the Universe, and Everything = 42 | (according to Douglas Adams' humorous science-fiction novel The Hitchhiker's Guide to the Galaxy)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
| # -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "what is the airspeed of an unladen swallow?"
target = ("#channel", u"estimated average cruising airspeed of an unladen European swallow = 11 m/s (meters per second) | (asked, but not answered, about a general swallow in the 1975 film Monty Python and the Holy Grail)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
Change complex test to one that doesn't have a localizable response# -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "answer to the life universe and everything"
target = ("#channel", u"Answer to the Ultimate Question of Life, the Universe, and Everything = 42 | (according to Douglas Adams' humorous science-fiction novel The Hitchhiker's Guide to the Galaxy)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
| <commit_before># -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "what is the airspeed of an unladen swallow?"
target = ("#channel", u"estimated average cruising airspeed of an unladen European swallow = 11 m/s (meters per second) | (asked, but not answered, about a general swallow in the 1975 film Monty Python and the Holy Grail)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
<commit_msg>Change complex test to one that doesn't have a localizable response<commit_after># -*- coding: utf-8 -*-
from nose.tools import eq_
import bot_mock
from pyfibot.modules import module_wolfram_alpha
config = {"module_wolfram_alpha":
{"appid": "3EYA3R-WVR6GJQWLH"}} # unit-test only APPID, do not abuse kthxbai
bot = bot_mock.BotMock(config)
def test_simple():
module_wolfram_alpha.init(bot)
query = "42"
target = ("#channel", u"42 = forty-two")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
def test_complex():
query = "answer to the life universe and everything"
target = ("#channel", u"Answer to the Ultimate Question of Life, the Universe, and Everything = 42 | (according to Douglas Adams' humorous science-fiction novel The Hitchhiker's Guide to the Galaxy)")
result = module_wolfram_alpha.command_wa(bot, None, "#channel", query)
eq_(target, result)
|
680a9345cc4087c521f5720472246bbf62e087c9 | wsgi/foodcheck_proj/foodcheck_app/management/commands/import_city_data.py | wsgi/foodcheck_proj/foodcheck_app/management/commands/import_city_data.py | from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
class Command(BaseCommand):
args = '<city_name city_name ...>'
help = 'Imports the city data from a CSV into the database'
def handle(self, *args, **options):
self.stdout.write('Successfully ran the function')
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
| from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
import csv
import os
class Command(BaseCommand):
# args = '<city_name city_name ...>' #Don't know what this does yet
help = 'Imports the city data from a CSV into the database'
    def __load_csv_to_dict(self, csv_filepath):
csvfile = open(csv_filepath)
dialect = csv.Sniffer().sniff(csvfile.read(4098))
csvfile.seek(0)
return csv.DictReader(csvfile, dialect=dialect)
    def __load_sf_dict_to_db(self):
        # Read in Restaurant data
        # TODO Find the latest data dump instead of hardcoding the name
        csv_dict_array = self.__load_csv_to_dict(
            os.path.join(os.environ['OPENSHIFT_REPO_DIR'],
                         "data", "data_dumps",
                         "20130805_business_plus.csv"))
for row in csv_dict_array:
            self.stdout.write(str(row))
# restaurant_object = Restaurant(name=row['name'], address=row['address'] . . . )
# restaurant_object.save()
self.stdout.write('Successfully loaded row')
pass
# Read in Score data
# Read in Violation data
def handle(self, *args, **options):
        self.__load_sf_dict_to_db()
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
| Test pulling from the business.csv | Test pulling from the business.csv
| Python | agpl-3.0 | esplinr/foodcheck,esplinr/foodcheck,esplinr/foodcheck,esplinr/foodcheck | from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
class Command(BaseCommand):
args = '<city_name city_name ...>'
help = 'Imports the city data from a CSV into the database'
def handle(self, *args, **options):
self.stdout.write('Successfully ran the function')
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
Test pulling from the business.csv | from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
import os
class Command(BaseCommand):
# args = '<city_name city_name ...>' #Don't know what this does yet
help = 'Imports the city data from a CSV into the database'
def __load_csv_to_dict(csv_filepath):
csvfile = open(csv_filepath)
dialect = csv.Sniffer().sniff(csvfile.read(4098))
csvfile.seek(0)
return csv.DictReader(csvfile, dialect=dialect)
def __load_sf_dict_to_db():
import csv
# Read in Restaurant data
csv_dict_array =
# TODO Find the latest data dump instead of hardcoding the name
__load_csv_to_dict(os.path.join(os.environ['OPENSHIFT_REPO_DIR'],
"data", "data_dumps",
"20130805_business_plus.csv"))
for row in csv_dict_array:
self.stdout.write(row)
# restaurant_object = Restaurant(name=row['name'], address=row['address'] . . . )
# restaurant_object.save()
self.stdout.write('Successfully loaded row')
pass
# Read in Score data
# Read in Violation data
def handle(self, *args, **options):
__load_sf_dict_to_db()
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
| <commit_before>from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
class Command(BaseCommand):
args = '<city_name city_name ...>'
help = 'Imports the city data from a CSV into the database'
def handle(self, *args, **options):
self.stdout.write('Successfully ran the function')
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
<commit_msg>Test pulling from the business.csv<commit_after> | from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
import os
class Command(BaseCommand):
# args = '<city_name city_name ...>' #Don't know what this does yet
help = 'Imports the city data from a CSV into the database'
def __load_csv_to_dict(csv_filepath):
csvfile = open(csv_filepath)
dialect = csv.Sniffer().sniff(csvfile.read(4098))
csvfile.seek(0)
return csv.DictReader(csvfile, dialect=dialect)
def __load_sf_dict_to_db():
import csv
# Read in Restaurant data
csv_dict_array =
# TODO Find the latest data dump instead of hardcoding the name
__load_csv_to_dict(os.path.join(os.environ['OPENSHIFT_REPO_DIR'],
"data", "data_dumps",
"20130805_business_plus.csv"))
for row in csv_dict_array:
self.stdout.write(row)
# restaurant_object = Restaurant(name=row['name'], address=row['address'] . . . )
# restaurant_object.save()
self.stdout.write('Successfully loaded row')
pass
# Read in Score data
# Read in Violation data
def handle(self, *args, **options):
__load_sf_dict_to_db()
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
| from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
class Command(BaseCommand):
args = '<city_name city_name ...>'
help = 'Imports the city data from a CSV into the database'
def handle(self, *args, **options):
self.stdout.write('Successfully ran the function')
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
Test pulling from the business.csvfrom django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
import os
class Command(BaseCommand):
# args = '<city_name city_name ...>' #Don't know what this does yet
help = 'Imports the city data from a CSV into the database'
def __load_csv_to_dict(csv_filepath):
csvfile = open(csv_filepath)
dialect = csv.Sniffer().sniff(csvfile.read(4098))
csvfile.seek(0)
return csv.DictReader(csvfile, dialect=dialect)
def __load_sf_dict_to_db():
import csv
# Read in Restaurant data
csv_dict_array =
# TODO Find the latest data dump instead of hardcoding the name
__load_csv_to_dict(os.path.join(os.environ['OPENSHIFT_REPO_DIR'],
"data", "data_dumps",
"20130805_business_plus.csv"))
for row in csv_dict_array:
self.stdout.write(row)
# restaurant_object = Restaurant(name=row['name'], address=row['address'] . . . )
# restaurant_object.save()
self.stdout.write('Successfully loaded row')
pass
# Read in Score data
# Read in Violation data
def handle(self, *args, **options):
__load_sf_dict_to_db()
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
| <commit_before>from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
class Command(BaseCommand):
args = '<city_name city_name ...>'
help = 'Imports the city data from a CSV into the database'
def handle(self, *args, **options):
self.stdout.write('Successfully ran the function')
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
<commit_msg>Test pulling from the business.csv<commit_after>from django.core.management.base import BaseCommand
from foodcheck_app.models import Restaurant, Score, Violation
import os
class Command(BaseCommand):
# args = '<city_name city_name ...>' #Don't know what this does yet
help = 'Imports the city data from a CSV into the database'
def __load_csv_to_dict(csv_filepath):
csvfile = open(csv_filepath)
dialect = csv.Sniffer().sniff(csvfile.read(4098))
csvfile.seek(0)
return csv.DictReader(csvfile, dialect=dialect)
def __load_sf_dict_to_db():
import csv
# Read in Restaurant data
csv_dict_array =
# TODO Find the latest data dump instead of hardcoding the name
__load_csv_to_dict(os.path.join(os.environ['OPENSHIFT_REPO_DIR'],
"data", "data_dumps",
"20130805_business_plus.csv"))
for row in csv_dict_array:
self.stdout.write(row)
# restaurant_object = Restaurant(name=row['name'], address=row['address'] . . . )
# restaurant_object.save()
self.stdout.write('Successfully loaded row')
pass
# Read in Score data
# Read in Violation data
def handle(self, *args, **options):
__load_sf_dict_to_db()
# vim:expandtab tabstop=8 shiftwidth=4 ts=8 sw=4 softtabstop=4
|
292ee86bb7c21c3bc99ff04176592b74aa5b1e85 | docs/config/all.py | docs/config/all.py | # -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11', 'current': True},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.11'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
| # -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
    {'name': '0.12', 'number': '/phinx/12', 'title': '0.12', 'current': True},
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11'},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.12'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
| Update docs config for 0.12 | Update docs config for 0.12 | Python | mit | robmorgan/phinx | # -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11', 'current': True},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.11'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
Update docs config for 0.12 | # -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
    {'name': '0.12', 'number': '/phinx/12', 'title': '0.12', 'current': True},
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11'},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.12'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
| <commit_before># -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11', 'current': True},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.11'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
<commit_msg>Update docs config for 0.12<commit_after> | # -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
    {'name': '0.12', 'number': '/phinx/12', 'title': '0.12', 'current': True},
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11'},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.12'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
| # -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11', 'current': True},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.11'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
Update docs config for 0.12# -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
    {'name': '0.12', 'number': '/phinx/12', 'title': '0.12', 'current': True},
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11'},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.12'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
| <commit_before># -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11', 'current': True},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.11'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
<commit_msg>Update docs config for 0.12<commit_after># -*- coding: utf-8 -*-
#
# Phinx documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 14 17:39:42 2012.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The full version, including alpha/beta/rc tags.
release = '0.12.x'
# The search index version.
search_version = 'phinx-0'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Phinx'
# Other versions that display in the version picker menu.
version_list = [
    {'name': '0.12', 'number': '/phinx/12', 'title': '0.12', 'current': True},
{'name': '0.11', 'number': '/phinx/11', 'title': '0.11'},
]
# Languages available.
languages = ['en', 'es', 'fr', 'ja']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '0.12'
show_root_link = True
repository = 'cakephp/phinx'
source_path = 'docs/'
hide_page_contents = ('search', '404', 'contents')
|
d191a947e34e4d6eee1965f4896a44efc8c7ae91 | feedback/views.py | feedback/views.py | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request)) | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request, template_name='feedback/feedback_form.html'):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
| Allow passing of template_name to view | Allow passing of template_name to view
| Python | bsd-3-clause | girasquid/django-feedback | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request))Allow passing of template_name to view | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request, template_name='feedback/feedback_form.html'):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
| <commit_before>from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request))<commit_msg>Allow passing of template_name to view<commit_after> | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request, template_name='feedback/feedback_form.html'):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
| from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request))Allow passing of template_name to viewfrom django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request, template_name='feedback/feedback_form.html'):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
| <commit_before>from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response('feedback/feedback_form.html', {'form': form}, context_instance=RequestContext(request))<commit_msg>Allow passing of template_name to view<commit_after>from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from feedback.forms import FeedbackForm
def leave_feedback(request, template_name='feedback/feedback_form.html'):
form = FeedbackForm(request.POST or None)
if form.is_valid():
feedback = form.save(commit=False)
feedback.user = request.user
feedback.save()
request.user.message_set.create(message="Your feedback has been saved successfully.")
return HttpResponseRedirect(request.POST.get('next', request.META.get('HTTP_REFERER', '/')))
return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
|
8074fca48f6a7246f26471ecdc14633d78475d8c | opps/articles/utils.py | opps/articles/utils.py | # -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| # -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| Add context channel_long_slug on articles | Add context channel_long_slug on articles
| Python | mit | opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps | # -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
Add context channel_long_slug on articles | # -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| <commit_before># -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
<commit_msg>Add context channel_long_slug on articles<commit_after> | # -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| # -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
Add context channel_long_slug on articles# -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| <commit_before># -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
<commit_msg>Add context channel_long_slug on articles<commit_after># -*- coding: utf-8 -*-
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
|
4ce3685ec4aab479a4d8c7a1d41d7028285c1656 | laalaa/apps/advisers/healthchecks.py | laalaa/apps/advisers/healthchecks.py | from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
| from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings", namespace="CELERY")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
| Load namespaced Celery configuration in healthcheck | Load namespaced Celery configuration in healthcheck
In bed52d9c60b00be751a6a9a6fc78b333fc5bccf6, I had to change the
configuration to be compatible with Django. I completely missed this
part.
Unfortunately, the test for this module starts with mocking the
`get_stats()` function, where this code exists, so I am at loss at how
to test this.
For now, I think we can expose `/healthcheck.json` on the staging
environment which would have helped detect this problem. In the
long-term, we will use a separate healthcheck for the worker processes.
There is still the question whether the "webapp" can connect to the
"broker" which I feel could be tackled by this app's healthcheck.
Thoughts for later, I suppose.
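With namespace="CELERY", config_from_object only reads settings whose names carry the CELERY_ prefix and strips that prefix before applying them. A hedged sketch of the pairing (the setting names are standard Celery 4+ keys, not taken from this repo):
# settings.py -- picked up because of the CELERY_ prefix
CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_TASK_ALWAYS_EAGER = False

# app bootstrap
from celery import Celery

app = Celery('laalaa')
app.config_from_object('django.conf:settings', namespace='CELERY')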
| Python | mit | ministryofjustice/laa-legal-adviser-api,ministryofjustice/laa-legal-adviser-api,ministryofjustice/laa-legal-adviser-api | from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
Load namespaced Celery configuration in healthcheck
In bed52d9c60b00be751a6a9a6fc78b333fc5bccf6, I had to change the
configuration to be compatible with Django. I completely missed this
part.
Unfortunately, the test for this module starts with mocking the
`get_stats()` function, where this code exists, so I am at loss at how
to test this.
For now, I think we can expose `/healthcheck.json` on the staging
environment which would have helped detect this problem. In the
long-term, we will use a separate healthcheck for the worker processes.
There is still the question whether the "webapp" can connect to the
"broker" which I feel could be tackled by this app's healthcheck.
Thoughts for later, I suppose. | from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings", namespace="CELERY")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
| <commit_before>from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
<commit_msg>Load namespaced Celery configuration in healthcheck
In bed52d9c60b00be751a6a9a6fc78b333fc5bccf6, I had to change the
configuration to be compatible with Django. I completely missed this
part.
Unfortunately, the test for this module starts with mocking the
`get_stats()` function, where this code exists, so I am at a loss as to how
to test this.
For now, I think we can expose `/healthcheck.json` on the staging
environment which would have helped detect this problem. In the
long-term, we will use a separate healthcheck for the worker processes.
There is still the question whether the "webapp" can connect to the
"broker" which I feel could be tackled by this app's healthcheck.
Thoughts for later, I suppose.<commit_after> | from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings", namespace="CELERY")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
| from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
Load namespaced Celery configuration in healthcheck
In bed52d9c60b00be751a6a9a6fc78b333fc5bccf6, I had to change the
configuration to be compatible with Django. I completely missed this
part.
Unfortunately, the test for this module starts with mocking the
`get_stats()` function, where this code exists, so I am at a loss as to how
to test this.
For now, I think we can expose `/healthcheck.json` on the staging
environment which would have helped detect this problem. In the
long-term, we will use a separate healthcheck for the worker processes.
There is still the question whether the "webapp" can connect to the
"broker" which I feel could be tackled by this app's healthcheck.
Thoughts for later, I suppose.from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings", namespace="CELERY")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
| <commit_before>from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
<commit_msg>Load namespaced Celery configuration in healthcheck
In bed52d9c60b00be751a6a9a6fc78b333fc5bccf6, I had to change the
configuration to be compatible with Django. I completely missed this
part.
Unfortunately, the test for this module starts with mocking the
`get_stats()` function, where this code exists, so I am at a loss as to how
to test this.
For now, I think we can expose `/healthcheck.json` on the staging
environment which would have helped detect this problem. In the
long-term, we will use a separate healthcheck for the worker processes.
There is still the question whether the "webapp" can connect to the
"broker" which I feel could be tackled by this app's healthcheck.
Thoughts for later, I suppose.<commit_after>from django.conf import settings
from moj_irat.healthchecks import HealthcheckResponse, UrlHealthcheck, registry
def get_stats():
from celery import Celery
app = Celery("laalaa")
app.config_from_object("django.conf:settings", namespace="CELERY")
return app.control.inspect().stats()
class CeleryWorkersHealthcheck(object):
def __init__(self, name):
self.name = name
def __call__(self, *args, **kwargs):
try:
stats = get_stats()
if not stats:
return self.error_response("No running workers were found.")
workers = stats.values()
if not workers:
return self.error_response("No workers running.")
except IOError as e:
msg = str(e)
msg += ". Check that the message broker is running."
return self.error_response(msg)
except ImportError as e:
return self.error_response(str(e))
return self.success_response()
def error_response(self, error):
return HealthcheckResponse(self.name, False, error=error)
def success_response(self):
return HealthcheckResponse(self.name, True)
registry.register_healthcheck(
UrlHealthcheck(name="postcodes.io", url="{}/postcodes/SW1A1AA".format(settings.POSTCODES_IO_URL))
)
registry.register_healthcheck(CeleryWorkersHealthcheck(name="workers"))
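A note on the technique this record captures: with `namespace="CELERY"`, `config_from_object()` only reads Django settings that carry the `CELERY_` prefix, strips that prefix and lowercases the remainder. The sketch below is illustrative only; the setting names are assumptions, not taken from the laalaa codebase.

# settings.py (hypothetical -- only the CELERY_ prefix matters):
# CELERY_BROKER_URL = "redis://localhost:6379/0"
# CELERY_TASK_ALWAYS_EAGER = False
from celery import Celery
app = Celery("laalaa")
# With the namespace argument, CELERY_BROKER_URL above surfaces as
# app.conf.broker_url; without it, Celery looks for the bare,
# un-prefixed names, which is the mismatch this commit fixes.
app.config_from_object("django.conf:settings", namespace="CELERY")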
|
4a4da808289ad2edd6549cca921fbfd8fa4049c9 | corehq/apps/es/tests/test_sms.py | corehq/apps/es/tests/test_sms.py | from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"not": {
"and": (
{"term": {"direction": "o"}},
{"term": {"processed": False}},
)
}
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
| from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"or": (
{
"not": {"term": {"direction": "o"}},
},
{
"not": {"term": {"processed": False}},
}
),
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
| Fix SMS ES test after not-and rewrite | Fix SMS ES test after not-and rewrite
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"not": {
"and": (
{"term": {"direction": "o"}},
{"term": {"processed": False}},
)
}
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
Fix SMS ES test after not-and rewrite | from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"or": (
{
"not": {"term": {"direction": "o"}},
},
{
"not": {"term": {"processed": False}},
}
),
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
| <commit_before>from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"not": {
"and": (
{"term": {"direction": "o"}},
{"term": {"processed": False}},
)
}
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
<commit_msg>Fix SMS ES test after not-and rewrite<commit_after> | from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"or": (
{
"not": {"term": {"direction": "o"}},
},
{
"not": {"term": {"processed": False}},
}
),
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
| from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"not": {
"and": (
{"term": {"direction": "o"}},
{"term": {"processed": False}},
)
}
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
Fix SMS ES test after not-and rewritefrom django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"or": (
{
"not": {"term": {"direction": "o"}},
},
{
"not": {"term": {"processed": False}},
}
),
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
| <commit_before>from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"not": {
"and": (
{"term": {"direction": "o"}},
{"term": {"processed": False}},
)
}
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
<commit_msg>Fix SMS ES test after not-and rewrite<commit_after>from django.test.testcases import SimpleTestCase
from corehq.apps.es.sms import SMSES
from corehq.apps.es.tests.utils import ElasticTestMixin
from corehq.elastic import SIZE_LIMIT
class TestSMSES(ElasticTestMixin, SimpleTestCase):
def test_processed_or_incoming(self):
json_output = {
"query": {
"filtered": {
"filter": {
"and": [
{"term": {"domain.exact": "demo"}},
{
"or": (
{
"not": {"term": {"direction": "o"}},
},
{
"not": {"term": {"processed": False}},
}
),
},
{"match_all": {}},
]
},
"query": {"match_all": {}}
}
},
"size": SIZE_LIMIT
}
query = SMSES().domain('demo').processed_or_incoming_messages()
self.checkQuery(query, json_output)
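The expectation change in this record is De Morgan's law applied to Elasticsearch filter dicts: NOT (direction = "o" AND processed = false) becomes (NOT direction = "o") OR (NOT processed = false). A minimal, self-contained sketch of the equivalence (the helper names are hypothetical, not part of corehq):

def not_and(*terms):
    # old shape: a single negated conjunction
    return {"not": {"and": terms}}
def or_nots(*terms):
    # new shape: a disjunction of negated terms
    return {"or": tuple({"not": t} for t in terms)}
a = {"term": {"direction": "o"}}
b = {"term": {"processed": False}}
# Both filters match exactly the same documents; only the shape differs.
print(not_and(a, b))
print(or_nots(a, b))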
|
c78aa5abc18dda674f607ead5af59ddb4a879ed4 | geozones/models.py | geozones/models.py | # coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Common regional zones. All messages can be grouped by this territorial
cluster.
TODO: use django-mptt
TODO: make nested regions
TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
| # coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Region
======
Common regional zones. All messages can be grouped by this territorial
cluster.
* TODO: use django-mptt
* TODO: make nested regions
* TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
class Location(models.Model):
'''
Geolocation
===========
    This data represents a POI (Point Of Interest).
    Each object contains a small piece of data:
    * Geocoordinates - latitude and longitude
    * Description - textual name of the POI or its human-readable address
    * Optional link to a georegion, if one exists
'''
# Geocoordinates
latitude = models.FloatField()
longitude = models.FloatField(db_column='longitude')
# Optional link for region
region = models.ForeignKey(
Region,
verbose_name=_("region"),
null=True, blank=True)
# Short description or address
description = models.CharField(max_length=200)
def __unicode__(self):
return u'%f %f' % (self.latitude, self.longitude)
| Move location from core to geozones | Move location from core to geozones
| Python | mit | sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/flowofkindness | # coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Common regional zones. All messages can be grouped by this territorial
cluster.
TODO: use django-mptt
TODO: make nested regions
TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
Move location from core to geozones | # coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Region
======
Common regional zones. All messages can be grouped by this territorial
cluster.
* TODO: use django-mptt
* TODO: make nested regions
* TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
class Location(models.Model):
'''
Geolocation
===========
    This data represents a POI (Point Of Interest).
    Each object contains a small piece of data:
    * Geocoordinates - latitude and longitude
    * Description - textual name of the POI or its human-readable address
    * Optional link to a georegion, if one exists
'''
# Geocoordinates
latitude = models.FloatField()
longitude = models.FloatField(db_column='longitude')
# Optional link for region
region = models.ForeignKey(
Region,
verbose_name=_("region"),
null=True, blank=True)
# Short description or address
description = models.CharField(max_length=200)
def __unicode__(self):
return u'%f %f' % (self.latitude, self.longitude)
| <commit_before># coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Common regional zones. All messages can be grouped by this territorial
cluster.
TODO: use django-mptt
TODO: make nested regions
TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
<commit_msg>Move location from core to geozones<commit_after> | # coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Region
======
Common regional zones. All messages can be grouped by this territorial
cluster.
* TODO: use django-mptt
* TODO: make nested regions
* TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
class Location(models.Model):
'''
Geolocation
===========
    This data represents a POI (Point Of Interest).
    Each object contains a small piece of data:
    * Geocoordinates - latitude and longitude
    * Description - textual name of the POI or its human-readable address
    * Optional link to a georegion, if one exists
'''
# Geocoordinates
latitude = models.FloatField()
longitude = models.FloatField(db_column='longitude')
# Optional link for region
region = models.ForeignKey(
Region,
verbose_name=_("region"),
null=True, blank=True)
# Short description or address
description = models.CharField(max_length=200)
def __unicode__(self):
return u'%f %f' % (self.latitude, self.longitude)
| # coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Common regional zones. All messages can be grouped by this territorial
cluster.
TODO: use django-mptt
TODO: make nested regions
TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
Move location from core to geozones# coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Region
======
Common regional zones. All messages can be grouped by this territorial
cluster.
* TODO: use django-mptt
* TODO: make nested regions
* TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
class Location(models.Model):
'''
Geolocation
===========
    This data represents a POI (Point Of Interest).
    Each object contains a small piece of data:
    * Geocoordinates - latitude and longitude
    * Description - textual name of the POI or its human-readable address
    * Optional link to a georegion, if one exists
'''
# Geocoordinates
latitude = models.FloatField()
longitude = models.FloatField(db_column='longitude')
# Optional link for region
region = models.ForeignKey(
Region,
verbose_name=_("region"),
null=True, blank=True)
# Short description or address
description = models.CharField(max_length=200)
def __unicode__(self):
return u'%f %f' % (self.latitude, self.longitude)
| <commit_before># coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Common regional zones. All messages can be grouped by this territorial
cluster.
TODO: use django-mptt
TODO: make nested regions
TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
<commit_msg>Move location from core to geozones<commit_after># coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Region(models.Model):
'''
Region
======
Common regional zones. All messages can be grouped by this territorial
cluster.
* TODO: use django-mptt
* TODO: make nested regions
* TODO: link message to nested regions
'''
class Meta():
ordering = ['order']
# Region number
name = models.CharField(max_length=200, verbose_name=_("region name"))
# Region slug
slug = models.SlugField(_("slug"))
# Region center coordinates
latitude = models.FloatField(_("latitude"))
longitude = models.FloatField(_("longitude"))
# Region map default zoom
zoom = models.SmallIntegerField(_("map zoom"))
order = models.IntegerField(_("order"))
def __unicode__(self):
return self.name
class Location(models.Model):
'''
Geolocation
===========
    This data represents a POI (Point Of Interest).
    Each object contains a small piece of data:
    * Geocoordinates - latitude and longitude
    * Description - textual name of the POI or its human-readable address
    * Optional link to a georegion, if one exists
'''
# Geocoordinates
latitude = models.FloatField()
longitude = models.FloatField(db_column='longitude')
# Optional link for region
region = models.ForeignKey(
Region,
verbose_name=_("region"),
null=True, blank=True)
# Short description or address
description = models.CharField(max_length=200)
def __unicode__(self):
return u'%f %f' % (self.latitude, self.longitude)
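A rough usage sketch for the model added in this record (it assumes the app is installed and migrated; the field values are invented):

from geozones.models import Region, Location
region = Region.objects.create(
    name="Moscow", slug="moscow",
    latitude=55.75, longitude=37.62, zoom=10, order=1)
# The region link is declared null=True/blank=True, so free-standing
# points without a region are valid too:
poi = Location.objects.create(
    latitude=55.751, longitude=37.617,
    region=region, description="Red Square")
orphan = Location.objects.create(
    latitude=0.0, longitude=0.0, description="Null Island")
Location.objects.filter(region__isnull=True)  # -> [orphan]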
|
257e8d2e6d1dc3c10eb7fc26c3deacaf4133bd9b | enactiveagents/view/agentevents.py | enactiveagents/view/agentevents.py | """
Prints a history of agent events to file.
"""
import events
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if event.agent not in self.preparation_history:
self.preparation_history[event.agent] = []
self.preparation_history[event.agent].append(event.action)
elif isinstance(event, events.AgentEnactionEvent):
if event.agent not in self.enaction_history:
self.enaction_history[event.agent] = []
self.enaction_history[event.agent].append(event.action)
elif isinstance(event, events.TickEvent):
pass | """
Prints a history of agent events to file.
"""
import events
import json
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if str(event.agent) not in self.preparation_history:
self.preparation_history[str(event.agent)] = []
self.preparation_history[str(event.agent)].append(str(event.action))
if len(self.preparation_history) > 20:
self.preparation_history.pop(0)
elif isinstance(event, events.AgentEnactionEvent):
if str(event.agent) not in self.enaction_history:
self.enaction_history[str(event.agent)] = []
self.enaction_history[str(event.agent)].append(str(event.action))
if len(self.enaction_history) > 20:
self.enaction_history.pop(0)
elif isinstance(event, events.TickEvent):
self.write_to_file()
def write_to_file(self):
"""
Write the history to the traces file.
"""
d = dict()
d["preparation_history"] = self.preparation_history
d["enaction_history"] = self.enaction_history
with open(self.file_path,'w+') as f:
json.dump(d, f) | Write agent events to a traces history file for the website. | Write agent events to a traces history file for the website.
| Python | mit | Beskhue/enactive-agents,Beskhue/enactive-agents,Beskhue/enactive-agents | """
Prints a history of agent events to file.
"""
import events
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if event.agent not in self.preparation_history:
self.preparation_history[event.agent] = []
self.preparation_history[event.agent].append(event.action)
elif isinstance(event, events.AgentEnactionEvent):
if event.agent not in self.enaction_history:
self.enaction_history[event.agent] = []
self.enaction_history[event.agent].append(event.action)
elif isinstance(event, events.TickEvent):
passWrite agent events to a traces history file for the website. | """
Prints a history of agent events to file.
"""
import events
import json
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if str(event.agent) not in self.preparation_history:
self.preparation_history[str(event.agent)] = []
self.preparation_history[str(event.agent)].append(str(event.action))
if len(self.preparation_history) > 20:
self.preparation_history.pop(0)
elif isinstance(event, events.AgentEnactionEvent):
if str(event.agent) not in self.enaction_history:
self.enaction_history[str(event.agent)] = []
self.enaction_history[str(event.agent)].append(str(event.action))
if len(self.enaction_history) > 20:
self.enaction_history.pop(0)
elif isinstance(event, events.TickEvent):
self.write_to_file()
def write_to_file(self):
"""
Write the history to the traces file.
"""
d = dict()
d["preparation_history"] = self.preparation_history
d["enaction_history"] = self.enaction_history
with open(self.file_path,'w+') as f:
json.dump(d, f) | <commit_before>"""
Prints a history of agent events to file.
"""
import events
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if event.agent not in self.preparation_history:
self.preparation_history[event.agent] = []
self.preparation_history[event.agent].append(event.action)
elif isinstance(event, events.AgentEnactionEvent):
if event.agent not in self.enaction_history:
self.enaction_history[event.agent] = []
self.enaction_history[event.agent].append(event.action)
elif isinstance(event, events.TickEvent):
pass<commit_msg>Write agent events to a traces history file for the website.<commit_after> | """
Prints a history of agent events to file.
"""
import events
import json
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if str(event.agent) not in self.preparation_history:
self.preparation_history[str(event.agent)] = []
self.preparation_history[str(event.agent)].append(str(event.action))
if len(self.preparation_history) > 20:
self.preparation_history.pop(0)
elif isinstance(event, events.AgentEnactionEvent):
if str(event.agent) not in self.enaction_history:
self.enaction_history[str(event.agent)] = []
self.enaction_history[str(event.agent)].append(str(event.action))
if len(self.enaction_history) > 20:
self.enaction_history.pop(0)
elif isinstance(event, events.TickEvent):
self.write_to_file()
def write_to_file(self):
"""
Write the history to the traces file.
"""
d = dict()
d["preparation_history"] = self.preparation_history
d["enaction_history"] = self.enaction_history
with open(self.file_path,'w+') as f:
json.dump(d, f) | """
Prints a history of agent events to file.
"""
import events
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if event.agent not in self.preparation_history:
self.preparation_history[event.agent] = []
self.preparation_history[event.agent].append(event.action)
elif isinstance(event, events.AgentEnactionEvent):
if event.agent not in self.enaction_history:
self.enaction_history[event.agent] = []
self.enaction_history[event.agent].append(event.action)
elif isinstance(event, events.TickEvent):
passWrite agent events to a traces history file for the website."""
Prints a history of agent events to file.
"""
import events
import json
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if str(event.agent) not in self.preparation_history:
self.preparation_history[str(event.agent)] = []
self.preparation_history[str(event.agent)].append(str(event.action))
if len(self.preparation_history) > 20:
self.preparation_history.pop(0)
elif isinstance(event, events.AgentEnactionEvent):
if str(event.agent) not in self.enaction_history:
self.enaction_history[str(event.agent)] = []
self.enaction_history[str(event.agent)].append(str(event.action))
if len(self.enaction_history) > 20:
self.enaction_history.pop(0)
elif isinstance(event, events.TickEvent):
self.write_to_file()
def write_to_file(self):
"""
Write the history to the traces file.
"""
d = dict()
d["preparation_history"] = self.preparation_history
d["enaction_history"] = self.enaction_history
with open(self.file_path,'w+') as f:
json.dump(d, f) | <commit_before>"""
Prints a history of agent events to file.
"""
import events
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if event.agent not in self.preparation_history:
self.preparation_history[event.agent] = []
self.preparation_history[event.agent].append(event.action)
elif isinstance(event, events.AgentEnactionEvent):
if event.agent not in self.enaction_history:
self.enaction_history[event.agent] = []
self.enaction_history[event.agent].append(event.action)
elif isinstance(event, events.TickEvent):
pass<commit_msg>Write agent events to a traces history file for the website.<commit_after>"""
Prints a history of agent events to file.
"""
import events
import json
class AgentEvents(events.EventListener):
"""
View class
"""
def __init__(self, file_path):
"""
:param file_path: The path of the file to output the history to.
"""
self.file_path = file_path
self.preparation_history = dict()
self.enaction_history = dict()
def notify(self, event):
if isinstance(event, events.AgentPreparationEvent):
if str(event.agent) not in self.preparation_history:
self.preparation_history[str(event.agent)] = []
self.preparation_history[str(event.agent)].append(str(event.action))
if len(self.preparation_history) > 20:
self.preparation_history.pop(0)
elif isinstance(event, events.AgentEnactionEvent):
if str(event.agent) not in self.enaction_history:
self.enaction_history[str(event.agent)] = []
self.enaction_history[str(event.agent)].append(str(event.action))
if len(self.enaction_history) > 20:
self.enaction_history.pop(0)
elif isinstance(event, events.TickEvent):
self.write_to_file()
def write_to_file(self):
"""
Write the history to the traces file.
"""
d = dict()
d["preparation_history"] = self.preparation_history
d["enaction_history"] = self.enaction_history
with open(self.file_path,'w+') as f:
json.dump(d, f) |
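For context, a consumer-side sketch of how the website could poll the traces file this commit starts writing (the file name, poll interval and loop are assumptions; the JSON keys follow the code above):

import json
import time
def poll_traces(path="traces.json", interval=1.0):
    while True:
        try:
            with open(path) as f:
                traces = json.load(f)
            yield traces["preparation_history"], traces["enaction_history"]
        except (IOError, ValueError):
            # the file may be absent or mid-write; skip this round
            pass
        time.sleep(interval)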
709bdf06c38ccd9713fb1e92be3102e9b1b1ae59 | nodeconductor/server/test_runner.py | nodeconductor/server/test_runner.py | # This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
| # This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
except ImportError:
pass
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
run_tests()
| Make setup.py test honor migrations | Make setup.py test honor migrations
Kudos to django-setuptest project
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | # This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
Make setup.py test honor migrations
Kudos to django-setuptest project | # This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
except ImportError:
pass
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
run_tests()
| <commit_before># This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
<commit_msg>Make setup.py test honor migrations
Kudos to django-setuptest project<commit_after> | # This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
except ImportError:
pass
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
run_tests()
| # This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
Make setup.py test honor migrations
Kudos to django-setuptest project# This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
except ImportError:
pass
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
run_tests()
| <commit_before># This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
<commit_msg>Make setup.py test honor migrations
Kudos to django-setuptest project<commit_after># This file mainly exists to allow python setup.py test to work.
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'nodeconductor.server.test_settings'
test_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..'))
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def run_tests():
test_runner_class = get_runner(settings)
try:
from south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
except ImportError:
pass
try:
import xmlrunner
class XMLTestRunner(test_runner_class):
def run_suite(self, suite, **kwargs):
verbosity = getattr(settings, 'TEST_OUTPUT_VERBOSE', 1)
if isinstance(verbosity, bool):
verbosity = (1, 2)[verbosity]
descriptions = getattr(settings, 'TEST_OUTPUT_DESCRIPTIONS', False)
output = getattr(settings, 'TEST_OUTPUT_DIR', '.')
return xmlrunner.XMLTestRunner(
verbosity=verbosity,
descriptions=descriptions,
output=output
).run(suite)
test_runner_class = XMLTestRunner
except ImportError:
print "Not generating XML reports, run 'pip install unittest-xml-reporting' to enable XML report generation"
test_runner = test_runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests([])
sys.exit(bool(failures))
if __name__ == '__main__':
run_tests()
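The South hook this record adds is deliberately optional; a rough equivalent of the control flow, for projects that may or may not have South installed:

try:
    # when present, this swaps syncdb for migrate during test DB creation,
    # so the schema under test matches what migrations actually produce
    from south.management.commands import patch_for_test_db_setup
except ImportError:
    patch_for_test_db_setup = None
if patch_for_test_db_setup is not None:
    patch_for_test_db_setup()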
|
3a9a6cb2c98403fc619c8979bdf48102028fd770 | rest/main.py | rest/main.py | import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run()
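As background for the endpoint above: wol.build_payload() is not shown in this record, but a Wake-on-LAN "magic packet" is conventionally six 0xFF bytes followed by the target MAC repeated sixteen times, sent over UDP broadcast. A Python 3 sketch under that assumption (port 9 is a common WoL default; the project's actual ports come from wol.get_wol_ports()):

import socket
def magic_packet(mac):
    # "00:11:22:33:44:55" -> 6 raw bytes, then 6x 0xFF + 16 repetitions
    raw = bytes.fromhex(mac.replace(":", "").replace("-", ""))
    return b"\xff" * 6 + raw * 16
def send_broadcast(payload, ip="192.168.1.255", port=9):
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    s.sendto(payload, (ip, int(port)))
    s.close()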
| import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run(host='0.0.0.0')
| Make the app externally visible | Make the app externally visible
| Python | mit | stevenaubertin/wol.py | import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run()
Make the app externally visible | import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run(host='0.0.0.0')
| <commit_before>import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run()
<commit_msg>Make the app externally visible<commit_after> | import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run(host='0.0.0.0')
| import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run()
Make the app externally visibleimport wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run(host='0.0.0.0')
| <commit_before>import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run()
<commit_msg>Make the app externally visible<commit_after>import wol
import json
from flask import request
from app_factory import create_app
app = create_app(__name__)
@app.route('/help', methods=['GET'])
def help():
return json.dumps({'help message': wol.help_message().strip()})
@app.route('/ports', methods=['GET'])
def get_wol_ports():
return json.dumps({"ports": wol.get_wol_ports()})
@app.route('/wake/<string:mac_address>', methods=['GET'])
def wake(mac_address):
try:
args = request.args.to_dict()
ip = args['ip'] if 'ip' in args else '192.168.1.255'
port = args['port'] if 'port' in args else wol.get_wol_ports()[2]
payload = wol.build_payload(mac_address)
if app.config['DEBUG']:
print 'Mac :', mac_address
print 'Ip :', ip
print 'Port :', port
else:
app.logger.info({
'Mac': mac_address,
'Ip': ip,
'Port': port
})
wol.send(payload, ip, port)
return json.dumps({"success": True})
except Exception as e:
app.logger.error(e.message)
return json.dumps({"error": e.message})
if __name__ == "__main__":
app.run(host='0.0.0.0')
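
Flask's development server binds 127.0.0.1 by default, so only the local machine can connect; passing host='0.0.0.0' binds every interface, which is all the one-line change above does. A minimal self-contained sketch of the same idea (the /ping route is made up for illustration):

from flask import Flask

app = Flask(__name__)

@app.route('/ping')
def ping():
    return 'pong'

if __name__ == '__main__':
    # 127.0.0.1 (the default) accepts local connections only;
    # 0.0.0.0 listens on every interface so other hosts can reach the app.
    app.run(host='0.0.0.0', port=5000)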
|
d562756f6b48366508db6ef9ffb27e3d5c707845 | root/main.py | root/main.py | from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test') | from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
| Fix bug in example code | Fix bug in example code
Fixes:
line 6, in query_google
driver, waiter, selector = init()
ValueError: too many values to unpack (expected 3) | Python | apache-2.0 | weihanwang/webdriver-python,weihanwang/webdriver-python | from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')Fix bug in example code
Fixes:
line 6, in query_google
driver, waiter, selector = init()
ValueError: too many values to unpack (expected 3) | from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
| <commit_before>from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')<commit_msg>Fix bug in example code
Fixes:
line 6, in query_google
driver, waiter, selector = init()
ValueError: too many values to unpack (expected 3)<commit_after> | from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
| from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')Fix bug in example code
Fixes:
line 6, in query_google
driver, waiter, selector = init()
ValueError: too many values to unpack (expected 3)from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
| <commit_before>from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')<commit_msg>Fix bug in example code
Fixes:
line 6, in query_google
driver, waiter, selector = init()
ValueError: too many values to unpack (expected 3)<commit_after>from .webdriver_util import init
def query_google(keywords):
print("Loading Firefox driver...")
driver, waiter, selector, datapath = init()
print("Fetching google front page...")
driver.get("http://google.com")
print("Taking a screenshot...")
waiter.shoot("frontpage")
print("Typing query string...")
selector.get_and_clear("input[type=text]").send_keys(keywords)
print("Hitting Enter...")
selector.get("button").click()
print("Waiting for results to come back...")
waiter.until_display("#ires")
print
print("The top search result is:")
print
print(' "{}"'.format(selector.get("#ires a").text))
print
if __name__ == '__main__':
query_google('test')
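
The traceback quoted in the message is ordinary tuple unpacking going stale: once a function grows a fourth return value, every three-name unpacking of it raises ValueError. A standalone sketch, with init_demo standing in for the project's init():

def init_demo():
    # Originally returned three objects; later grew a fourth (a data path).
    return 'driver', 'waiter', 'selector', '/tmp/data'

try:
    driver, waiter, selector = init_demo()
except ValueError as exc:
    print(exc)  # too many values to unpack (expected 3)

# Matching the new arity fixes it; a starred name can also absorb extras.
driver, waiter, selector, datapath = init_demo()
driver, waiter, selector, *rest = init_demo()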
|
92b9b557eef77f7ea4c05c74c1c229a2b508e640 | wsgi/openshift/urls.py | wsgi/openshift/urls.py | from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| Change to get Django 1.5 to work. | Change to get Django 1.5 to work.
| Python | agpl-3.0 | esplinr/foodcheck,esplinr/foodcheck,esplinr/foodcheck,esplinr/foodcheck | from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
Change to get Django 1.5 to work. | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| <commit_before>from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Change to get Django 1.5 to work.<commit_after> | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
Change to get Django 1.5 to work.from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| <commit_before>from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Change to get Django 1.5 to work.<commit_after>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'openshift.views.home', name='home'),
# url(r'^openshift/', include('openshift.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
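
The one-line import swap tracks Django's relocation of patterns/include/url from django.conf.urls.defaults (deprecated in the 1.4 era, removed in 1.6) to django.conf.urls. Code that had to run across both eras sometimes hedged with a try/except import; a sketch of that pattern, not taken from this repository:

try:
    from django.conf.urls import include, url   # Django 1.4 and later
except ImportError:
    from django.conf.urls.defaults import include, url  # pre-1.4 location

urlpatterns = [
    # url(r'^admin/', include(admin.site.urls)) and friends go here.
    # patterns() itself was later removed (Django 1.10), so a plain
    # list is the forward-compatible form.
]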
|
f0c590ef5d8ae98ee10e9c985cf14e626a9ca835 | zou/app/models/task_type.py | zou/app/models/task_type.py | from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
| from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
allow_timelog = db.Column(db.Boolean, default=True)
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
| Add allow_timelog to task type model | Add allow_timelog to task type model
| Python | agpl-3.0 | cgwire/zou | from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
Add allow_timelog to task type model | from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
allow_timelog = db.Column(db.Boolean, default=True)
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
| <commit_before>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
<commit_msg>Add allow_timelog to task type model<commit_after> | from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
allow_timelog = db.Column(db.Boolean, default=True)
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
| from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
Add allow_timelog to task type modelfrom sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
allow_timelog = db.Column(db.Boolean, default=True)
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
| <commit_before>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
<commit_msg>Add allow_timelog to task type model<commit_after>from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class TaskType(db.Model, BaseMixin, SerializerMixin):
"""
Categorize tasks in domain areas: modeling, animation, etc.
"""
name = db.Column(db.String(40), nullable=False)
short_name = db.Column(db.String(20))
color = db.Column(db.String(7), default="#FFFFFF")
priority = db.Column(db.Integer, default=1)
for_shots = db.Column(db.Boolean, default=False)
for_entity = db.Column(db.String(30), default="Asset")
allow_timelog = db.Column(db.Boolean, default=True)
shotgun_id = db.Column(db.Integer, index=True)
department_id = db.Column(
UUIDType(binary=False),
db.ForeignKey("department.id")
)
__table_args__ = (
db.UniqueConstraint(
'name',
'for_entity',
'department_id',
name='task_type_uc'
),
)
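
Adding allow_timelog to the model class only affects tables created from scratch; an existing database needs a schema migration for the new column. A hypothetical Alembic migration for a change of this shape, where the revision ids and the task_type table name are placeholders rather than values taken from zou:

import sqlalchemy as sa
from alembic import op

revision = '0001_add_allow_timelog'   # placeholder
down_revision = None                  # placeholder

def upgrade():
    # server_default gives rows that already exist a value for the column.
    op.add_column(
        'task_type',
        sa.Column('allow_timelog', sa.Boolean(), server_default=sa.true()),
    )

def downgrade():
    op.drop_column('task_type', 'allow_timelog')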
|
ae70502f910c85f6a4528b487eea3b535cec6c39 | frappe/desk/doctype/tag/test_tag.py | frappe/desk/doctype/tag/test_tag.py | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestTag(unittest.TestCase):
pass
| import unittest
import frappe
from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag
class TestTag(unittest.TestCase):
def setUp(self) -> None:
frappe.db.sql("DELETE from `tabTag`")
frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")
def test_tag_count_query(self):
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['No Tags', frappe.db.count('DocType')]]})
add_tag('Standard', 'DocType', 'User')
add_tag('Standard', 'DocType', 'ToDo')
# count with no filter
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]})
# count with child table field filter
self.assertDictEqual(get_stats('["_user_tags"]',
'DocType',
filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'),
{'_user_tags': [['Standard', 1], ['No Tags', 0]]}) | Add test case to validate tag count query | test: Add test case to validate tag count query
| Python | mit | mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,almeidapaulopt/frappe,yashodhank/frappe,StrellaGroup/frappe,frappe/frappe,frappe/frappe,StrellaGroup/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,yashodhank/frappe,StrellaGroup/frappe,mhbu50/frappe,mhbu50/frappe | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestTag(unittest.TestCase):
pass
test: Add test case to validate tag count query | import unittest
import frappe
from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag
class TestTag(unittest.TestCase):
def setUp(self) -> None:
frappe.db.sql("DELETE from `tabTag`")
frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")
def test_tag_count_query(self):
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['No Tags', frappe.db.count('DocType')]]})
add_tag('Standard', 'DocType', 'User')
add_tag('Standard', 'DocType', 'ToDo')
# count with no filter
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]})
# count with child table field filter
self.assertDictEqual(get_stats('["_user_tags"]',
'DocType',
filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'),
{'_user_tags': [['Standard', 1], ['No Tags', 0]]}) | <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestTag(unittest.TestCase):
pass
<commit_msg>test: Add test case to validate tag count query<commit_after> | import unittest
import frappe
from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag
class TestTag(unittest.TestCase):
def setUp(self) -> None:
frappe.db.sql("DELETE from `tabTag`")
frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")
def test_tag_count_query(self):
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['No Tags', frappe.db.count('DocType')]]})
add_tag('Standard', 'DocType', 'User')
add_tag('Standard', 'DocType', 'ToDo')
# count with no filter
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]})
# count with child table field filter
self.assertDictEqual(get_stats('["_user_tags"]',
'DocType',
filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'),
{'_user_tags': [['Standard', 1], ['No Tags', 0]]}) | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestTag(unittest.TestCase):
pass
test: Add test case to validate tag count queryimport unittest
import frappe
from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag
class TestTag(unittest.TestCase):
def setUp(self) -> None:
frappe.db.sql("DELETE from `tabTag`")
frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")
def test_tag_count_query(self):
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['No Tags', frappe.db.count('DocType')]]})
add_tag('Standard', 'DocType', 'User')
add_tag('Standard', 'DocType', 'ToDo')
# count with no filter
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]})
# count with child table field filter
self.assertDictEqual(get_stats('["_user_tags"]',
'DocType',
filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'),
{'_user_tags': [['Standard', 1], ['No Tags', 0]]}) | <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestTag(unittest.TestCase):
pass
<commit_msg>test: Add test case to validate tag count query<commit_after>import unittest
import frappe
from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag
class TestTag(unittest.TestCase):
def setUp(self) -> None:
frappe.db.sql("DELETE from `tabTag`")
frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")
def test_tag_count_query(self):
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['No Tags', frappe.db.count('DocType')]]})
add_tag('Standard', 'DocType', 'User')
add_tag('Standard', 'DocType', 'ToDo')
# count with no filter
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'),
{'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]})
# count with child table field filter
self.assertDictEqual(get_stats('["_user_tags"]',
'DocType',
filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'),
{'_user_tags': [['Standard', 1], ['No Tags', 0]]}) |
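
The assertions above expect a dict mapping the queried field to [tag, count] pairs plus a synthetic 'No Tags' bucket. That shape can be reproduced with plain Python over a column of comma-separated tag strings; a self-contained sketch, independent of frappe's actual get_stats internals:

from collections import Counter

def tag_stats(user_tags_column):
    """user_tags_column: one _user_tags string (comma-separated) per document."""
    counts = Counter()
    untagged = 0
    for raw in user_tags_column:
        tags = [t for t in (raw or '').split(',') if t]
        if not tags:
            untagged += 1
        counts.update(tags)
    stats = [[tag, n] for tag, n in counts.most_common()]
    stats.append(['No Tags', untagged])
    return {'_user_tags': stats}

print(tag_stats(['', ',Standard', ',Standard', None]))
# {'_user_tags': [['Standard', 2], ['No Tags', 2]]}
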
a7c210a68a8671137681c55324341c60b256a92b | symantecssl/core.py | symantecssl/core.py | from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
| from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
| Add a slightly higher level API for submitting an order | Add a slightly higher level API for submitting an order
| Python | apache-2.0 | glyph/symantecssl,chelseawinfree/symantecssl,cloudkeep/symantecssl,grigouze/symantecssl,jmvrbanac/symantecssl | from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
Add a slightly higher level API for submitting an order | from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
| <commit_before>from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
<commit_msg>Add a slightly higher level API for submitting an order<commit_after> | from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
| from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
Add a slightly higher level API for submitting an orderfrom __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
| <commit_before>from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
<commit_msg>Add a slightly higher level API for submitting an order<commit_after>from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
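
The new order() method collapses the build-then-submit flow into a single call, forwarding keyword arguments straight to Order. A usage sketch under stated assumptions: the import path follows this record's file layout, and the keyword arguments are hypothetical stand-ins for whatever Order really accepts:

from symantecssl.core import Symantec

api = Symantec('partner-user', 'partner-pass')  # placeholder credentials

# Before: response = api.submit(Order(...))
# After, one call:
response = api.order(partner_code='EXAMPLE', product_code='SSL123')  # hypothetical kwargs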
|
1062ef4daf124f0dcc056c1e95b7a234642fb36d | mopidy/backends/__init__.py | mopidy/backends/__init__.py | import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
| import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
self.playlist = Playlist()
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
| Add playlist attribute to playlist controller | Add playlist attribute to playlist controller
| Python | apache-2.0 | vrs01/mopidy,dbrgn/mopidy,pacificIT/mopidy,kingosticks/mopidy,bacontext/mopidy,hkariti/mopidy,liamw9534/mopidy,rawdlite/mopidy,quartz55/mopidy,pacificIT/mopidy,tkem/mopidy,bacontext/mopidy,mopidy/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,jmarsik/mopidy,jmarsik/mopidy,jcass77/mopidy,woutervanwijk/mopidy,hkariti/mopidy,quartz55/mopidy,hkariti/mopidy,jcass77/mopidy,adamcik/mopidy,jcass77/mopidy,tkem/mopidy,ali/mopidy,liamw9534/mopidy,kingosticks/mopidy,priestd09/mopidy,pacificIT/mopidy,vrs01/mopidy,jmarsik/mopidy,jodal/mopidy,adamcik/mopidy,bencevans/mopidy,ali/mopidy,bencevans/mopidy,ali/mopidy,priestd09/mopidy,mokieyue/mopidy,swak/mopidy,tkem/mopidy,ZenithDK/mopidy,jodal/mopidy,mopidy/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,swak/mopidy,tkem/mopidy,bencevans/mopidy,rawdlite/mopidy,ali/mopidy,bacontext/mopidy,vrs01/mopidy,mokieyue/mopidy,jodal/mopidy,rawdlite/mopidy,ZenithDK/mopidy,swak/mopidy,diandiankan/mopidy,dbrgn/mopidy,priestd09/mopidy,bencevans/mopidy,quartz55/mopidy,abarisain/mopidy,mopidy/mopidy,swak/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,adamcik/mopidy,SuperStarPL/mopidy,vrs01/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,kingosticks/mopidy,abarisain/mopidy,woutervanwijk/mopidy,dbrgn/mopidy,dbrgn/mopidy,diandiankan/mopidy,mokieyue/mopidy,diandiankan/mopidy,quartz55/mopidy,hkariti/mopidy,glogiotatidis/mopidy,bacontext/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,pacificIT/mopidy | import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
Add playlist attribute to playlist controller | import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
self.playlist = Playlist()
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
| <commit_before>import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
<commit_msg>Add playlist attribute to playlist controller<commit_after> | import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
self.playlist = Playlist()
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
| import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
Add playlist attribute to playlist controllerimport logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
self.playlist = Playlist()
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
| <commit_before>import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
<commit_msg>Add playlist attribute to playlist controller<commit_after>import logging
import time
from mopidy.exceptions import MpdNotImplemented
from mopidy.models import Playlist
logger = logging.getLogger('backends.base')
class BaseBackend(object):
current_playlist = None
library = None
playback = None
stored_playlists = None
uri_handlers = []
class BaseCurrentPlaylistController(object):
def __init__(self, backend):
self.backend = backend
self.playlist = Playlist()
def add(self, track, at_position=None):
raise NotImplementedError
class BasePlaybackController(object):
PAUSED = 'paused'
PLAYING = 'playing'
STOPPED = 'stopped'
def __init__(self, backend):
self.backend = backend
self.state = self.STOPPED
self.current_track = None
self.playlist_position = None
def play(self, id=None, position=None):
raise NotImplementedError
def next(self):
raise NotImplementedError
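
Assigning self.playlist = Playlist() inside __init__ gives every controller instance its own playlist object rather than state shared at class level. The pitfall that per-instance assignment sidesteps, in a self-contained sketch:

class SharedDefault:
    tracks = []                  # one list shared by every instance

class PerInstance:
    def __init__(self):
        self.tracks = []         # fresh object per controller, like self.playlist = Playlist()

a, b = SharedDefault(), SharedDefault()
a.tracks.append('song')
print(b.tracks)                  # ['song'] (state leaked between instances)

c, d = PerInstance(), PerInstance()
c.tracks.append('song')
print(d.tracks)                  # []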
|
3a8a7661c0aad111dbaace178062352b30f7fac5 | numcodecs/tests/__init__.py | numcodecs/tests/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import pytest
pytest.register_assert_rewrite('numcodecs.tests.common')
| Enable pytest rewriting in test helper functions. | Enable pytest rewriting in test helper functions.
| Python | mit | alimanfoo/numcodecs,zarr-developers/numcodecs,alimanfoo/numcodecs | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
Enable pytest rewriting in test helper functions. | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import pytest
pytest.register_assert_rewrite('numcodecs.tests.common')
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
<commit_msg>Enable pytest rewriting in test helper functions.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import pytest
pytest.register_assert_rewrite('numcodecs.tests.common')
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
Enable pytest rewriting in test helper functions.# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import pytest
pytest.register_assert_rewrite('numcodecs.tests.common')
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
<commit_msg>Enable pytest rewriting in test helper functions.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import pytest
pytest.register_assert_rewrite('numcodecs.tests.common')
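
pytest.register_assert_rewrite opts a non-test helper module into pytest's assertion rewriting, so bare asserts in shared helpers report their operands just like asserts written directly in test files. The call has to run before the helper is first imported, which is why it sits in the package __init__. A generic two-module sketch of the pattern (mypkg is a placeholder name):

# mypkg/tests/__init__.py -- must execute before mypkg.tests.common is
# imported anywhere, otherwise rewriting silently does nothing.
import pytest

pytest.register_assert_rewrite('mypkg.tests.common')

# mypkg/tests/common.py -- helper shared by many test modules.
def check_roundtrip(codec, data):
    # With rewriting enabled, a failure here shows both operands,
    # not just a bare AssertionError.
    assert codec.decode(codec.encode(data)) == data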
|
535d1f1ea3f229a0831830c4d19e7547e2b2ddab | cosmic/__init__.py | cosmic/__init__.py | from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
_global_cosmos = Cosmos()
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
| from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
import teleport
_global_cosmos = Cosmos()
# Temporary hack.
teleport._global_map = _global_cosmos
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
| Add temporary hack to make teleport work with global Cosmos context | Add temporary hack to make teleport work with global Cosmos context
| Python | mit | cosmic-api/cosmic.py | from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
_global_cosmos = Cosmos()
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
Add temporary hack to make teleport work with global Cosmos context | from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
import teleport
_global_cosmos = Cosmos()
# Temporary hack.
teleport._global_map = _global_cosmos
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
| <commit_before>from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
_global_cosmos = Cosmos()
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
<commit_msg>Add temporary hack to make teleport work with global Cosmos context<commit_after> | from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
import teleport
_global_cosmos = Cosmos()
# Temporary hack.
teleport._global_map = _global_cosmos
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
| from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
_global_cosmos = Cosmos()
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
Add temporary hack to make teleport work with global Cosmos contextfrom werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
import teleport
_global_cosmos = Cosmos()
# Temporary hack.
teleport._global_map = _global_cosmos
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
| <commit_before>from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
_global_cosmos = Cosmos()
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
<commit_msg>Add temporary hack to make teleport work with global Cosmos context<commit_after>from werkzeug.local import LocalProxy, LocalStack
from flask import request
from .models import _ctx_stack, Cosmos
import teleport
_global_cosmos = Cosmos()
# Temporary hack.
teleport._global_map = _global_cosmos
def _get_current_cosmos():
if _ctx_stack.top != None:
return _ctx_stack.top
else:
return _global_cosmos
cosmos = LocalProxy(_get_current_cosmos)
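
The "temporary hack" above is module-level monkey-patching: reassigning another module's global at import time so every later reader of that global sees the replacement, which is exactly what teleport._global_map = _global_cosmos does. A runnable sketch of the mechanism using a stand-in module (somelib is a placeholder, not teleport's real layout):

import types

# Stand-in for a third-party module that consults a module-level registry.
somelib = types.ModuleType('somelib')
somelib._global_map = {}
somelib.lookup = lambda key: somelib._global_map.get(key)

# The hack: repoint the library's global at our own registry.
our_registry = {'model': 'Cosmos'}
somelib._global_map = our_registry

print(somelib.lookup('model'))   # 'Cosmos'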
|
e1043bfb410740ab3429ff659e78197b44fefb74 | extract_options.py | extract_options.py | from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$max": "$price_max"}
}
}]))[0]['value']
doc['price_min'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$min": "$price_min"}
}
}]))[0]['value']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
| from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = diners_collection.find_one(sort=[("price_max", -1)])['price_max']
doc['price_min'] = diners_collection.find_one(sort=[("price_min", 1)])['price_min']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
| Change get min, max value method | Change get min, max value method
| Python | mit | earlwlkr/POICrawler | from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$max": "$price_max"}
}
}]))[0]['value']
doc['price_min'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$min": "$price_min"}
}
}]))[0]['value']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
Change get min, max value method | from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = diners_collection.find_one(sort=[("price_max", -1)])['price_max']
doc['price_min'] = diners_collection.find_one(sort=[("price_min", 1)])['price_min']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
| <commit_before>from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$max": "$price_max"}
}
}]))[0]['value']
doc['price_min'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$min": "$price_min"}
}
}]))[0]['value']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
<commit_msg>Change get min, max value method<commit_after> | from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = diners_collection.find_one(sort=[("price_max", -1)])['price_max']
doc['price_min'] = diners_collection.find_one(sort=[("price_min", 1)])['price_min']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
| from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$max": "$price_max"}
}
}]))[0]['value']
doc['price_min'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$min": "$price_min"}
}
}]))[0]['value']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
Change get min, max value method
from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = diners_collection.find_one(sort=[("price_max", -1)])['price_max']
doc['price_min'] = diners_collection.find_one(sort=[("price_min", 1)])['price_min']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
| <commit_before>from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$max": "$price_max"}
}
}]))[0]['value']
doc['price_min'] = list(diners_collection.aggregate([{
"$group":
{
"_id": None,
"value": {"$min": "$price_min"}
}
}]))[0]['value']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
<commit_msg>Change get min, max value method<commit_after>from pymongo import MongoClient
def main():
client = MongoClient()
db = client.cityhotspots
db.drop_collection('dineroptions')
diners_collection = db.diners
doc = {}
diner_options_collection = db.dineroptions
doc['categories'] = diners_collection.distinct('category')
doc['categories'].insert(0, 'Tất cả')
doc['cuisines'] = diners_collection.distinct('cuisine')
doc['cuisines'].insert(0, 'Tất cả')
doc['districts'] = diners_collection.distinct('address.district')
doc['districts'].insert(0, 'Tất cả')
doc['price_max'] = diners_collection.find_one(sort=[("price_max", -1)])['price_max']
doc['price_min'] = diners_collection.find_one(sort=[("price_min", 1)])['price_min']
diner_options_collection.insert(doc)
if __name__ == '__main__':
main()
|
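The diff above swaps a $group aggregation for a sorted find_one; the sketch below shows the two forms side by side and checks that they agree. It assumes a local mongod and a non-empty diners collection with a numeric price_max field, matching the names in the row.

from pymongo import MongoClient

coll = MongoClient().cityhotspots.diners

# Old approach: a $group aggregation scans the collection to compute the maximum.
agg_max = list(coll.aggregate([
    {"$group": {"_id": None, "value": {"$max": "$price_max"}}}
]))[0]["value"]

# New approach: sort descending on the field and read the first document.
# With an index on price_max this is a single index probe rather than a scan.
doc_max = coll.find_one(sort=[("price_max", -1)])["price_max"]

assert agg_max == doc_max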
bf0990f1e5dda5e78c859dd625638357da5b1ef4 | sir/schema/modelext.py | sir/schema/modelext.py | # Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
| # Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
| Add a backref from Link to LinkAttribute | Add a backref from Link to LinkAttribute
| Python | mit | jeffweeksio/sir | # Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
Add a backref from Link to LinkAttribute | # Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
| <commit_before># Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
<commit_msg>Add a backref from Link to LinkAttribute<commit_after> | # Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
| # Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
Add a backref from Link to LinkAttribute
# Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
| <commit_before># Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
<commit_msg>Add a backref from Link to LinkAttribute<commit_after># Copyright (c) 2014 Lukas Lalinsky, Wieland Hoffmann
# License: MIT, see LICENSE for details
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
|
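A self-contained sketch of the backref idiom this row adds: declaring relationship(..., backref="attributes") on the child class gives the parent a matching collection without a second declaration. Toy Link/LinkAttribute tables on in-memory SQLite, not the mbdata schema; assumes SQLAlchemy 1.4 or later.

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Link(Base):
    __tablename__ = "link"
    id = Column(Integer, primary_key=True)


class LinkAttribute(Base):
    __tablename__ = "link_attribute"
    id = Column(Integer, primary_key=True)
    link_id = Column(Integer, ForeignKey("link.id"), nullable=False)
    # One declaration wires both directions: LinkAttribute.link here and,
    # through the backref, a Link.attributes collection on the parent.
    link = relationship("Link", backref="attributes")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    link = Link()
    session.add(LinkAttribute(link=link))
    session.flush()
    assert link.attributes[0].link is link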
d6bfac0ac2bc27c8d809467ed6071c5c9a7f5579 | client_test_run.py | client_test_run.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
unittest.TextTestRunner().run(suite)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
import sys
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
sys.exit(1)
| Exit with 1 if client tests fail | Exit with 1 if client tests fail
| Python | mit | lao605/product-definition-center,xychu/product-definition-center,product-definition-center/product-definition-center,release-engineering/product-definition-center,product-definition-center/product-definition-center,pombredanne/product-definition-center,lao605/product-definition-center,pombredanne/product-definition-center,product-definition-center/product-definition-center,xychu/product-definition-center,lao605/product-definition-center,tzhaoredhat/automation,release-engineering/product-definition-center,tzhaoredhat/automation,pombredanne/product-definition-center,lao605/product-definition-center,release-engineering/product-definition-center,tzhaoredhat/automation,xychu/product-definition-center,xychu/product-definition-center,pombredanne/product-definition-center,tzhaoredhat/automation,release-engineering/product-definition-center,product-definition-center/product-definition-center | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
unittest.TextTestRunner().run(suite)
Exit with 1 if client tests fail | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
import sys
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
sys.exit(1)
| <commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
unittest.TextTestRunner().run(suite)
<commit_msg>Exit with 1 if client tests fail<commit_after> | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
import sys
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
sys.exit(1)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
unittest.TextTestRunner().run(suite)
Exit with 1 if client tests fail
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
import sys
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
sys.exit(1)
| <commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
unittest.TextTestRunner().run(suite)
<commit_msg>Exit with 1 if client tests fail<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import unittest
import argparse
import sys
"""
Use this script either without arguments to run all tests:
python client_test_run.py
or with specific module/test to run only part of the test suite:
python client_test_run.py pdc_client.tests.release.tests.ReleaseTestCase.test_list_all
"""
if __name__ == '__main__':
parser = argparse.ArgumentParser('client_test_run.py')
parser.add_argument('tests', metavar='TEST', nargs='*')
options = parser.parse_args()
loader = unittest.TestLoader()
if options.tests:
suite = loader.loadTestsFromNames(options.tests)
else:
suite = loader.discover('pdc_client/tests', top_level_dir='.')
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
sys.exit(1)
|
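The fix above boils down to one rule: TextTestRunner().run() reports failures on its return value, not through the process exit status, so a wrapper script must translate wasSuccessful() into sys.exit itself. A minimal standalone version, with a trivial test case added purely for illustration:

import sys
import unittest


class ExampleTest(unittest.TestCase):
    def test_passes(self):
        self.assertTrue(True)


if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(ExampleTest)
    result = unittest.TextTestRunner().run(suite)
    # Without this check the script always exits 0 and CI never notices
    # failing tests -- exactly the bug the commit above fixes.
    if not result.wasSuccessful():
        sys.exit(1)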
10dc45d8e5fea60066b6719b2588fb65566a012f | dakis/api/views.py | dakis/api/views.py | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author', 'details')
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| Remove deprecated experiment details field from api | Remove deprecated experiment details field from api
| Python | agpl-3.0 | niekas/dakis,niekas/dakis,niekas/dakis | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author', 'details')
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
Remove deprecated experiment details field from api | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| <commit_before>from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author', 'details')
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
<commit_msg>Remove deprecated experiment details field from api<commit_after> | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author', 'details')
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
Remove deprecated experiment details field from api
from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| <commit_before>from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author', 'details')
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
<commit_msg>Remove deprecated experiment details field from api<commit_after>from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
|
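The change above is needed because Meta.exclude may only name fields that still exist on the model; once the deprecated details column is dropped, DRF refuses to build the serializer's fields. A minimal reproduction sketch, assuming Django and djangorestframework are installed; the model, fields, and app label here are illustrative, not the dakis schema.

import django
from django.conf import settings

settings.configure(
    INSTALLED_APPS=["django.contrib.contenttypes", "django.contrib.auth"],
    DATABASES={"default": {"ENGINE": "django.db.backends.sqlite3",
                           "NAME": ":memory:"}},
)
django.setup()

from django.db import models
from rest_framework import serializers


class Experiment(models.Model):
    author = models.CharField(max_length=100)
    name = models.CharField(max_length=100)

    class Meta:
        app_label = "demo"


class ExperimentSerializer(serializers.ModelSerializer):
    class Meta:
        model = Experiment
        exclude = ("author",)  # every name here must be a real model field


ExperimentSerializer().fields  # builds; listing a dropped field would raise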
19952d7f437270065a693dc886c867329ec7c4a0 | startzone.py | startzone.py | import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
| import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
| Fix up some settings for start_zone() | Fix up some settings for start_zone()
| Python | agpl-3.0 | cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO,cnelsonsic/SimpleMMO | import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
Fix up some settings for start_zone() | import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
| <commit_before>import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
<commit_msg>Fix up some settings for start_zone()<commit_after> | import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
| import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
Fix up some settings for start_zone()
import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
| <commit_before>import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (port, zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
<commit_msg>Fix up some settings for start_zone()<commit_after>import xmlrpclib
from supervisor.xmlrpc import SupervisorTransport
def start_zone(port=1300, zoneid="defaultzone", processgroup='zones', autorestart=False):
s = xmlrpclib.ServerProxy('http://localhost:9001')
import socket
try:
version = s.twiddler.getAPIVersion()
except(socket.error), exc:
raise UserWarning("Could not connect to supervisor: %s" % exc)
if float(version) >= 0.3:
command = '/usr/bin/python zoneserver.py --port=%d --zoneid=%s' % (int(port), zoneid)
settings = {'command': command, 'autostart': str(True), 'autorestart': str(autorestart), 'redirect_stderr': str(True)}
try:
addtogroup = s.twiddler.addProgramToGroup(processgroup, zoneid, settings)
except(xmlrpclib.Fault), exc:
if "BAD_NAME" in exc.faultString:
raise UserWarning("Zone already exists in process list.")
else:
print exc
print exc.faultCode, exc.faultString
raise
if addtogroup:
return True
else:
raise UserWarning("Couldn't add zone %s to process group." % zoneid)
else:
raise UserWarning("Twiddler version too old.")
if __name__ == "__main__":
if start_zone():
print "Started zone successfully."
|
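Two separate fixes ride in the diff above: int(port) guards the %d interpolation against a port passed in as a string, and redirect_stderr folds the zone's stderr into its supervisor log. A Python 3 rendering of the settings construction (the original is Python 2, hence xmlrpclib and print statements); it assumes a running supervisord with the supervisor_twiddler RPC extension on localhost:9001, as the script above does.

from xmlrpc.client import ServerProxy


def zone_settings(port, zoneid, autorestart=False):
    # int() accepts ports that arrive as strings (argv, config files);
    # without it, '%d' % port raises TypeError for a str value.
    command = "/usr/bin/python zoneserver.py --port=%d --zoneid=%s" % (int(port), zoneid)
    return {
        "command": command,
        "autostart": str(True),
        "autorestart": str(autorestart),
        # Capture the zone's stderr in its supervisor log as well.
        "redirect_stderr": str(True),
    }


server = ServerProxy("http://localhost:9001")
server.twiddler.addProgramToGroup("zones", "zone1", zone_settings("1300", "zone1"))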
724a55ded262d4d0986e5a5a3c4c04e145558bea | test/test_device.py | test/test_device.py | from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
| from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
| Add test to get device with non-existant key | Add test to get device with non-existant key
| Python | apache-2.0 | willrogers/pml,willrogers/pml | from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
Add test to get device with non-existant key | from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
| <commit_before>from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
<commit_msg>Add test to get device with non-existant key<commit_after> | from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
| from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
Add test to get device with non-existant keyfrom pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
| <commit_before>from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
<commit_msg>Add test to get device with non-existant key<commit_after>from pml.exceptions import PvException
import pml.device
import pytest
import mock
@pytest.fixture
def create_device(readback, setpoint):
_rb = readback
_sp = setpoint
device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock())
return device
def test_set_device_value():
rb_pv = 'SR01A-PC-SQUAD-01:I'
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device1 = create_device(rb_pv, sp_pv)
device1.put_value(40)
device1._cs.put.assert_called_with(sp_pv, 40)
device2 = create_device(rb_pv, None)
with pytest.raises(PvException):
device2.put_value(40)
def test_get_device_value():
sp_pv = 'SR01A-PC-SQUAD-01:SETI'
device = create_device(None, sp_pv)
with pytest.raises(PvException):
device.get_value('non_existant')
with pytest.raises(PvException):
create_device(None, None)
|