commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9281a33a93b9eec26c5344eaecfecaca42607b51
|
examples/dataflow_chain.py
|
examples/dataflow_chain.py
|
import sys
sys.path.append('../py')
from iroha import *
from iroha.iroha import *
d = IDesign()
mod = IModule(d, "mod")
def CreateTable(mod):
tab = ITable(mod)
st0 = IState(tab)
st1 = IState(tab)
tab.initialSt = st0
design_tool.AddNextState(st0, st1)
tab.states.append(st0)
tab.states.append(st1)
return tab
tab0 = CreateTable(mod)
tab1 = CreateTable(mod)
# Kicks tab0 by external input
ext_input = design_tool.CreateExtInput(tab0, "data_in", 0)
in_insn = IInsn(ext_input)
in_r = IRegister(tab0, "r")
in_r.isWire = True
in_r.valueType = IValueType(False, 0)
in_insn.outputs.append(in_r)
tab0.states[0].insns.append(in_insn)
df_in = design_tool.GetResource(tab0, "dataflow-in")
df_insn = IInsn(df_in)
df_insn.inputs.append(in_r)
tab0.states[0].insns.append(df_insn)
# Kicks tab1
sreg = design_tool.CreateSharedReg(tab0, "o", 0)
sreg.resource_params.AddValue("DEFAULT-VALUE", "0")
sinsn = IInsn(sreg)
bit0 = design_tool.AllocConstNum(tab0, False, 0, 1)
sinsn.inputs.append(bit0)
tab0.states[-1].insns.append(sinsn)
# Kicked by tab0
rreg = design_tool.CreateSharedRegReader(tab1, sreg)
rinsn = IInsn(rreg)
rwire = IRegister(tab1, "r")
rwire.isWire = True
rwire.valueType = IValueType(False, 0)
rinsn.outputs.append(rwire)
tab1.states[0].insns.append(rinsn)
df1_in = design_tool.GetResource(tab1, "dataflow-in")
df1_insn = IInsn(df1_in)
df1_insn.inputs.append(rwire)
tab1.states[0].insns.append(df1_insn)
# Triggers ext port
ext_output = design_tool.CreateExtOutput(tab1, "data_out", 0)
ext_output.resource_params.AddValue("DEFAULT-VALUE", "0")
oinsn = IInsn(ext_output)
bit1 = design_tool.AllocConstNum(tab1, False, 0, 1)
oinsn.inputs.append(bit1)
tab1.states[-1].insns.append(oinsn)
design_tool.ValidateIds(d)
DesignWriter(d).Write()
|
Add an example to chain dataflow tables.
|
Add an example to chain dataflow tables.
|
Python
|
bsd-3-clause
|
nlsynth/iroha,nlsynth/iroha
|
Add an example to chain dataflow tables.
|
import sys
sys.path.append('../py')
from iroha import *
from iroha.iroha import *
d = IDesign()
mod = IModule(d, "mod")
def CreateTable(mod):
tab = ITable(mod)
st0 = IState(tab)
st1 = IState(tab)
tab.initialSt = st0
design_tool.AddNextState(st0, st1)
tab.states.append(st0)
tab.states.append(st1)
return tab
tab0 = CreateTable(mod)
tab1 = CreateTable(mod)
# Kicks tab0 by external input
ext_input = design_tool.CreateExtInput(tab0, "data_in", 0)
in_insn = IInsn(ext_input)
in_r = IRegister(tab0, "r")
in_r.isWire = True
in_r.valueType = IValueType(False, 0)
in_insn.outputs.append(in_r)
tab0.states[0].insns.append(in_insn)
df_in = design_tool.GetResource(tab0, "dataflow-in")
df_insn = IInsn(df_in)
df_insn.inputs.append(in_r)
tab0.states[0].insns.append(df_insn)
# Kicks tab1
sreg = design_tool.CreateSharedReg(tab0, "o", 0)
sreg.resource_params.AddValue("DEFAULT-VALUE", "0")
sinsn = IInsn(sreg)
bit0 = design_tool.AllocConstNum(tab0, False, 0, 1)
sinsn.inputs.append(bit0)
tab0.states[-1].insns.append(sinsn)
# Kicked by tab0
rreg = design_tool.CreateSharedRegReader(tab1, sreg)
rinsn = IInsn(rreg)
rwire = IRegister(tab1, "r")
rwire.isWire = True
rwire.valueType = IValueType(False, 0)
rinsn.outputs.append(rwire)
tab1.states[0].insns.append(rinsn)
df1_in = design_tool.GetResource(tab1, "dataflow-in")
df1_insn = IInsn(df1_in)
df1_insn.inputs.append(rwire)
tab1.states[0].insns.append(df1_insn)
# Triggers ext port
ext_output = design_tool.CreateExtOutput(tab1, "data_out", 0)
ext_output.resource_params.AddValue("DEFAULT-VALUE", "0")
oinsn = IInsn(ext_output)
bit1 = design_tool.AllocConstNum(tab1, False, 0, 1)
oinsn.inputs.append(bit1)
tab1.states[-1].insns.append(oinsn)
design_tool.ValidateIds(d)
DesignWriter(d).Write()
|
<commit_before><commit_msg>Add an example to chain dataflow tables.<commit_after>
|
import sys
sys.path.append('../py')
from iroha import *
from iroha.iroha import *
d = IDesign()
mod = IModule(d, "mod")
def CreateTable(mod):
tab = ITable(mod)
st0 = IState(tab)
st1 = IState(tab)
tab.initialSt = st0
design_tool.AddNextState(st0, st1)
tab.states.append(st0)
tab.states.append(st1)
return tab
tab0 = CreateTable(mod)
tab1 = CreateTable(mod)
# Kicks tab0 by external input
ext_input = design_tool.CreateExtInput(tab0, "data_in", 0)
in_insn = IInsn(ext_input)
in_r = IRegister(tab0, "r")
in_r.isWire = True
in_r.valueType = IValueType(False, 0)
in_insn.outputs.append(in_r)
tab0.states[0].insns.append(in_insn)
df_in = design_tool.GetResource(tab0, "dataflow-in")
df_insn = IInsn(df_in)
df_insn.inputs.append(in_r)
tab0.states[0].insns.append(df_insn)
# Kicks tab1
sreg = design_tool.CreateSharedReg(tab0, "o", 0)
sreg.resource_params.AddValue("DEFAULT-VALUE", "0")
sinsn = IInsn(sreg)
bit0 = design_tool.AllocConstNum(tab0, False, 0, 1)
sinsn.inputs.append(bit0)
tab0.states[-1].insns.append(sinsn)
# Kicked by tab0
rreg = design_tool.CreateSharedRegReader(tab1, sreg)
rinsn = IInsn(rreg)
rwire = IRegister(tab1, "r")
rwire.isWire = True
rwire.valueType = IValueType(False, 0)
rinsn.outputs.append(rwire)
tab1.states[0].insns.append(rinsn)
df1_in = design_tool.GetResource(tab1, "dataflow-in")
df1_insn = IInsn(df1_in)
df1_insn.inputs.append(rwire)
tab1.states[0].insns.append(df1_insn)
# Triggers ext port
ext_output = design_tool.CreateExtOutput(tab1, "data_out", 0)
ext_output.resource_params.AddValue("DEFAULT-VALUE", "0")
oinsn = IInsn(ext_output)
bit1 = design_tool.AllocConstNum(tab1, False, 0, 1)
oinsn.inputs.append(bit1)
tab1.states[-1].insns.append(oinsn)
design_tool.ValidateIds(d)
DesignWriter(d).Write()
|
Add an example to chain dataflow tables.import sys
sys.path.append('../py')
from iroha import *
from iroha.iroha import *
d = IDesign()
mod = IModule(d, "mod")
def CreateTable(mod):
tab = ITable(mod)
st0 = IState(tab)
st1 = IState(tab)
tab.initialSt = st0
design_tool.AddNextState(st0, st1)
tab.states.append(st0)
tab.states.append(st1)
return tab
tab0 = CreateTable(mod)
tab1 = CreateTable(mod)
# Kicks tab0 by external input
ext_input = design_tool.CreateExtInput(tab0, "data_in", 0)
in_insn = IInsn(ext_input)
in_r = IRegister(tab0, "r")
in_r.isWire = True
in_r.valueType = IValueType(False, 0)
in_insn.outputs.append(in_r)
tab0.states[0].insns.append(in_insn)
df_in = design_tool.GetResource(tab0, "dataflow-in")
df_insn = IInsn(df_in)
df_insn.inputs.append(in_r)
tab0.states[0].insns.append(df_insn)
# Kicks tab1
sreg = design_tool.CreateSharedReg(tab0, "o", 0)
sreg.resource_params.AddValue("DEFAULT-VALUE", "0")
sinsn = IInsn(sreg)
bit0 = design_tool.AllocConstNum(tab0, False, 0, 1)
sinsn.inputs.append(bit0)
tab0.states[-1].insns.append(sinsn)
# Kicked by tab0
rreg = design_tool.CreateSharedRegReader(tab1, sreg)
rinsn = IInsn(rreg)
rwire = IRegister(tab1, "r")
rwire.isWire = True
rwire.valueType = IValueType(False, 0)
rinsn.outputs.append(rwire)
tab1.states[0].insns.append(rinsn)
df1_in = design_tool.GetResource(tab1, "dataflow-in")
df1_insn = IInsn(df1_in)
df1_insn.inputs.append(rwire)
tab1.states[0].insns.append(df1_insn)
# Triggers ext port
ext_output = design_tool.CreateExtOutput(tab1, "data_out", 0)
ext_output.resource_params.AddValue("DEFAULT-VALUE", "0")
oinsn = IInsn(ext_output)
bit1 = design_tool.AllocConstNum(tab1, False, 0, 1)
oinsn.inputs.append(bit1)
tab1.states[-1].insns.append(oinsn)
design_tool.ValidateIds(d)
DesignWriter(d).Write()
|
<commit_before><commit_msg>Add an example to chain dataflow tables.<commit_after>import sys
sys.path.append('../py')
from iroha import *
from iroha.iroha import *
d = IDesign()
mod = IModule(d, "mod")
def CreateTable(mod):
tab = ITable(mod)
st0 = IState(tab)
st1 = IState(tab)
tab.initialSt = st0
design_tool.AddNextState(st0, st1)
tab.states.append(st0)
tab.states.append(st1)
return tab
tab0 = CreateTable(mod)
tab1 = CreateTable(mod)
# Kicks tab0 by external input
ext_input = design_tool.CreateExtInput(tab0, "data_in", 0)
in_insn = IInsn(ext_input)
in_r = IRegister(tab0, "r")
in_r.isWire = True
in_r.valueType = IValueType(False, 0)
in_insn.outputs.append(in_r)
tab0.states[0].insns.append(in_insn)
df_in = design_tool.GetResource(tab0, "dataflow-in")
df_insn = IInsn(df_in)
df_insn.inputs.append(in_r)
tab0.states[0].insns.append(df_insn)
# Kicks tab1
sreg = design_tool.CreateSharedReg(tab0, "o", 0)
sreg.resource_params.AddValue("DEFAULT-VALUE", "0")
sinsn = IInsn(sreg)
bit0 = design_tool.AllocConstNum(tab0, False, 0, 1)
sinsn.inputs.append(bit0)
tab0.states[-1].insns.append(sinsn)
# Kicked by tab0
rreg = design_tool.CreateSharedRegReader(tab1, sreg)
rinsn = IInsn(rreg)
rwire = IRegister(tab1, "r")
rwire.isWire = True
rwire.valueType = IValueType(False, 0)
rinsn.outputs.append(rwire)
tab1.states[0].insns.append(rinsn)
df1_in = design_tool.GetResource(tab1, "dataflow-in")
df1_insn = IInsn(df1_in)
df1_insn.inputs.append(rwire)
tab1.states[0].insns.append(df1_insn)
# Triggers ext port
ext_output = design_tool.CreateExtOutput(tab1, "data_out", 0)
ext_output.resource_params.AddValue("DEFAULT-VALUE", "0")
oinsn = IInsn(ext_output)
bit1 = design_tool.AllocConstNum(tab1, False, 0, 1)
oinsn.inputs.append(bit1)
tab1.states[-1].insns.append(oinsn)
design_tool.ValidateIds(d)
DesignWriter(d).Write()
|
|
1096c336c26c136c94e80b3d2f1cf6060c566de8
|
duralex/GitCommitVisitor.py
|
duralex/GitCommitVisitor.py
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
import subprocess
import os
class GitCommitVisitor(AbstractVisitor):
def visit_article_reference_node(self, node, post):
if self.commitMessage and self.repository:
process = subprocess.Popen(
[
'git',
'-C', self.repository,
'commit',
os.path.basename(node['filename']),
'-m', self.commitMessage,
'--author="DuraLex <duralex@legilibre.fr>"'
],
# shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
out, err = process.communicate()
print(''.join(out))
def visit_node(self, node):
if 'repository' in node:
self.repository = node['repository'];
super(GitCommitVisitor, self).visit_node(node)
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' in node:
self.commitMessage = node['commitMessage']
else:
self.commitMessage = ''
if 'diff' in node:
process = subprocess.Popen(
'patch -p0',
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = process.communicate(input=node['diff'].encode('utf-8') + '\n')
|
Add a visitor to apply the generated diffs as patches and commit them using git.
|
Add a visitor to apply the generated diffs as patches and commit them using git.
|
Python
|
mit
|
Legilibre/duralex
|
Add a visitor to apply the generated diffs as patches and commit them using git.
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
import subprocess
import os
class GitCommitVisitor(AbstractVisitor):
def visit_article_reference_node(self, node, post):
if self.commitMessage and self.repository:
process = subprocess.Popen(
[
'git',
'-C', self.repository,
'commit',
os.path.basename(node['filename']),
'-m', self.commitMessage,
'--author="DuraLex <duralex@legilibre.fr>"'
],
# shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
out, err = process.communicate()
print(''.join(out))
def visit_node(self, node):
if 'repository' in node:
self.repository = node['repository'];
super(GitCommitVisitor, self).visit_node(node)
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' in node:
self.commitMessage = node['commitMessage']
else:
self.commitMessage = ''
if 'diff' in node:
process = subprocess.Popen(
'patch -p0',
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = process.communicate(input=node['diff'].encode('utf-8') + '\n')
|
<commit_before><commit_msg>Add a visitor to apply the generated diffs as patches and commit them using git.<commit_after>
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
import subprocess
import os
class GitCommitVisitor(AbstractVisitor):
def visit_article_reference_node(self, node, post):
if self.commitMessage and self.repository:
process = subprocess.Popen(
[
'git',
'-C', self.repository,
'commit',
os.path.basename(node['filename']),
'-m', self.commitMessage,
'--author="DuraLex <duralex@legilibre.fr>"'
],
# shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
out, err = process.communicate()
print(''.join(out))
def visit_node(self, node):
if 'repository' in node:
self.repository = node['repository'];
super(GitCommitVisitor, self).visit_node(node)
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' in node:
self.commitMessage = node['commitMessage']
else:
self.commitMessage = ''
if 'diff' in node:
process = subprocess.Popen(
'patch -p0',
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = process.communicate(input=node['diff'].encode('utf-8') + '\n')
|
Add a visitor to apply the generated diffs as patches and commit them using git.# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
import subprocess
import os
class GitCommitVisitor(AbstractVisitor):
def visit_article_reference_node(self, node, post):
if self.commitMessage and self.repository:
process = subprocess.Popen(
[
'git',
'-C', self.repository,
'commit',
os.path.basename(node['filename']),
'-m', self.commitMessage,
'--author="DuraLex <duralex@legilibre.fr>"'
],
# shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
out, err = process.communicate()
print(''.join(out))
def visit_node(self, node):
if 'repository' in node:
self.repository = node['repository'];
super(GitCommitVisitor, self).visit_node(node)
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' in node:
self.commitMessage = node['commitMessage']
else:
self.commitMessage = ''
if 'diff' in node:
process = subprocess.Popen(
'patch -p0',
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = process.communicate(input=node['diff'].encode('utf-8') + '\n')
|
<commit_before><commit_msg>Add a visitor to apply the generated diffs as patches and commit them using git.<commit_after># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
import subprocess
import os
class GitCommitVisitor(AbstractVisitor):
def visit_article_reference_node(self, node, post):
if self.commitMessage and self.repository:
process = subprocess.Popen(
[
'git',
'-C', self.repository,
'commit',
os.path.basename(node['filename']),
'-m', self.commitMessage,
'--author="DuraLex <duralex@legilibre.fr>"'
],
# shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
out, err = process.communicate()
print(''.join(out))
def visit_node(self, node):
if 'repository' in node:
self.repository = node['repository'];
super(GitCommitVisitor, self).visit_node(node)
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' in node:
self.commitMessage = node['commitMessage']
else:
self.commitMessage = ''
if 'diff' in node:
process = subprocess.Popen(
'patch -p0',
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = process.communicate(input=node['diff'].encode('utf-8') + '\n')
|
|
03d030b63e7adb2fd5aee3bd710cfbad59ed0864
|
fil_finder/tests/test_input_types.py
|
fil_finder/tests/test_input_types.py
|
# Licensed under an MIT open source license - see LICENSE
from unittest import TestCase
import numpy as np
import numpy.testing as npt
import astropy.units as u
from astropy.io.fits import PrimaryHDU
from fil_finder.io_funcs import input_data
from _testing_data import *
FWHM_FACTOR = 2 * np.sqrt(2 * np.log(2.))
class Test_FilFinder_Input_Types(TestCase):
def test_array_input(self):
output = input_data(img)
npt.assert_equal(img, output["data"])
def test_HDU_input(self):
hdu = PrimaryHDU(img, header=hdr)
output = input_data(hdu)
npt.assert_equal(img, output["data"])
npt.assert_equal(hdr, output["header"])
def test_3D_input(self):
try:
output = input_data(np.ones((3,) * 3))
except Exception, e:
assert isinstance(e, TypeError)
def test_3D_squeezable_input(self):
output = input_data(np.ones((3,3,1)))
npt.assert_equal(np.ones((3,3)), output["data"])
|
Add tests for new input types (FITS HDUs)
|
Add tests for new input types (FITS HDUs)
|
Python
|
mit
|
e-koch/FilFinder
|
Add tests for new input types (FITS HDUs)
|
# Licensed under an MIT open source license - see LICENSE
from unittest import TestCase
import numpy as np
import numpy.testing as npt
import astropy.units as u
from astropy.io.fits import PrimaryHDU
from fil_finder.io_funcs import input_data
from _testing_data import *
FWHM_FACTOR = 2 * np.sqrt(2 * np.log(2.))
class Test_FilFinder_Input_Types(TestCase):
def test_array_input(self):
output = input_data(img)
npt.assert_equal(img, output["data"])
def test_HDU_input(self):
hdu = PrimaryHDU(img, header=hdr)
output = input_data(hdu)
npt.assert_equal(img, output["data"])
npt.assert_equal(hdr, output["header"])
def test_3D_input(self):
try:
output = input_data(np.ones((3,) * 3))
except Exception, e:
assert isinstance(e, TypeError)
def test_3D_squeezable_input(self):
output = input_data(np.ones((3,3,1)))
npt.assert_equal(np.ones((3,3)), output["data"])
|
<commit_before><commit_msg>Add tests for new input types (FITS HDUs)<commit_after>
|
# Licensed under an MIT open source license - see LICENSE
from unittest import TestCase
import numpy as np
import numpy.testing as npt
import astropy.units as u
from astropy.io.fits import PrimaryHDU
from fil_finder.io_funcs import input_data
from _testing_data import *
FWHM_FACTOR = 2 * np.sqrt(2 * np.log(2.))
class Test_FilFinder_Input_Types(TestCase):
def test_array_input(self):
output = input_data(img)
npt.assert_equal(img, output["data"])
def test_HDU_input(self):
hdu = PrimaryHDU(img, header=hdr)
output = input_data(hdu)
npt.assert_equal(img, output["data"])
npt.assert_equal(hdr, output["header"])
def test_3D_input(self):
try:
output = input_data(np.ones((3,) * 3))
except Exception, e:
assert isinstance(e, TypeError)
def test_3D_squeezable_input(self):
output = input_data(np.ones((3,3,1)))
npt.assert_equal(np.ones((3,3)), output["data"])
|
Add tests for new input types (FITS HDUs)# Licensed under an MIT open source license - see LICENSE
from unittest import TestCase
import numpy as np
import numpy.testing as npt
import astropy.units as u
from astropy.io.fits import PrimaryHDU
from fil_finder.io_funcs import input_data
from _testing_data import *
FWHM_FACTOR = 2 * np.sqrt(2 * np.log(2.))
class Test_FilFinder_Input_Types(TestCase):
def test_array_input(self):
output = input_data(img)
npt.assert_equal(img, output["data"])
def test_HDU_input(self):
hdu = PrimaryHDU(img, header=hdr)
output = input_data(hdu)
npt.assert_equal(img, output["data"])
npt.assert_equal(hdr, output["header"])
def test_3D_input(self):
try:
output = input_data(np.ones((3,) * 3))
except Exception, e:
assert isinstance(e, TypeError)
def test_3D_squeezable_input(self):
output = input_data(np.ones((3,3,1)))
npt.assert_equal(np.ones((3,3)), output["data"])
|
<commit_before><commit_msg>Add tests for new input types (FITS HDUs)<commit_after># Licensed under an MIT open source license - see LICENSE
from unittest import TestCase
import numpy as np
import numpy.testing as npt
import astropy.units as u
from astropy.io.fits import PrimaryHDU
from fil_finder.io_funcs import input_data
from _testing_data import *
FWHM_FACTOR = 2 * np.sqrt(2 * np.log(2.))
class Test_FilFinder_Input_Types(TestCase):
def test_array_input(self):
output = input_data(img)
npt.assert_equal(img, output["data"])
def test_HDU_input(self):
hdu = PrimaryHDU(img, header=hdr)
output = input_data(hdu)
npt.assert_equal(img, output["data"])
npt.assert_equal(hdr, output["header"])
def test_3D_input(self):
try:
output = input_data(np.ones((3,) * 3))
except Exception, e:
assert isinstance(e, TypeError)
def test_3D_squeezable_input(self):
output = input_data(np.ones((3,3,1)))
npt.assert_equal(np.ones((3,3)), output["data"])
|
|
e51dfa39ee2fdaee372b3928fb20d33e2830f1ef
|
clubs/migrations/0042_alahsa_deanship.py
|
clubs/migrations/0042_alahsa_deanship.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
alahsa_deanship = Club.objects.create(
name="عمادة شؤون الطلاب",
english_name="Deanship of Student Affairs",
description="",
year=year_2015_2016,
email="studentsclub@ksau-hs.edu.sa",
visible=False,
can_review=True,
can_view_assessments=False,
is_assessed=False,
can_submit_activities=False,
can_edit=False,
can_delete=False,
city="A",
)
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=alahsa_deanship)
def remove_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
riyadh_presidency = Club.objects.get(english_name="Presidency",
year=year_2015_2016,
city="R")
alahsa_deanship = Club.objects.get(english_name="Deanship of Student Affairs",
year=year_2015_2016,
city="A")
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=riyadh_presidency)
alahsa_deanship.delete()
class Migration(migrations.Migration):
dependencies = [
('clubs', '0041_rename_to_miskhat'),
]
operations = [
migrations.RunPython(
add_alahsa_deanship,
reverse_code=remove_alahsa_deanship),
]
|
Add Alahsa Student Affairs Deanship
|
Add Alahsa Student Affairs Deanship
|
Python
|
agpl-3.0
|
enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal
|
Add Alahsa Student Affairs Deanship
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
alahsa_deanship = Club.objects.create(
name="عمادة شؤون الطلاب",
english_name="Deanship of Student Affairs",
description="",
year=year_2015_2016,
email="studentsclub@ksau-hs.edu.sa",
visible=False,
can_review=True,
can_view_assessments=False,
is_assessed=False,
can_submit_activities=False,
can_edit=False,
can_delete=False,
city="A",
)
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=alahsa_deanship)
def remove_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
riyadh_presidency = Club.objects.get(english_name="Presidency",
year=year_2015_2016,
city="R")
alahsa_deanship = Club.objects.get(english_name="Deanship of Student Affairs",
year=year_2015_2016,
city="A")
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=riyadh_presidency)
alahsa_deanship.delete()
class Migration(migrations.Migration):
dependencies = [
('clubs', '0041_rename_to_miskhat'),
]
operations = [
migrations.RunPython(
add_alahsa_deanship,
reverse_code=remove_alahsa_deanship),
]
|
<commit_before><commit_msg>Add Alahsa Student Affairs Deanship<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
alahsa_deanship = Club.objects.create(
name="عمادة شؤون الطلاب",
english_name="Deanship of Student Affairs",
description="",
year=year_2015_2016,
email="studentsclub@ksau-hs.edu.sa",
visible=False,
can_review=True,
can_view_assessments=False,
is_assessed=False,
can_submit_activities=False,
can_edit=False,
can_delete=False,
city="A",
)
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=alahsa_deanship)
def remove_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
riyadh_presidency = Club.objects.get(english_name="Presidency",
year=year_2015_2016,
city="R")
alahsa_deanship = Club.objects.get(english_name="Deanship of Student Affairs",
year=year_2015_2016,
city="A")
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=riyadh_presidency)
alahsa_deanship.delete()
class Migration(migrations.Migration):
dependencies = [
('clubs', '0041_rename_to_miskhat'),
]
operations = [
migrations.RunPython(
add_alahsa_deanship,
reverse_code=remove_alahsa_deanship),
]
|
Add Alahsa Student Affairs Deanship# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
alahsa_deanship = Club.objects.create(
name="عمادة شؤون الطلاب",
english_name="Deanship of Student Affairs",
description="",
year=year_2015_2016,
email="studentsclub@ksau-hs.edu.sa",
visible=False,
can_review=True,
can_view_assessments=False,
is_assessed=False,
can_submit_activities=False,
can_edit=False,
can_delete=False,
city="A",
)
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=alahsa_deanship)
def remove_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
riyadh_presidency = Club.objects.get(english_name="Presidency",
year=year_2015_2016,
city="R")
alahsa_deanship = Club.objects.get(english_name="Deanship of Student Affairs",
year=year_2015_2016,
city="A")
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=riyadh_presidency)
alahsa_deanship.delete()
class Migration(migrations.Migration):
dependencies = [
('clubs', '0041_rename_to_miskhat'),
]
operations = [
migrations.RunPython(
add_alahsa_deanship,
reverse_code=remove_alahsa_deanship),
]
|
<commit_before><commit_msg>Add Alahsa Student Affairs Deanship<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
alahsa_deanship = Club.objects.create(
name="عمادة شؤون الطلاب",
english_name="Deanship of Student Affairs",
description="",
year=year_2015_2016,
email="studentsclub@ksau-hs.edu.sa",
visible=False,
can_review=True,
can_view_assessments=False,
is_assessed=False,
can_submit_activities=False,
can_edit=False,
can_delete=False,
city="A",
)
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=alahsa_deanship)
def remove_alahsa_deanship(apps, schema_editor):
Club = apps.get_model('clubs', 'Club')
StudentClubYear = apps.get_model('core', 'StudentClubYear')
year_2015_2016 = StudentClubYear.objects.get(start_date__year=2015,
end_date__year=2016)
riyadh_presidency = Club.objects.get(english_name="Presidency",
year=year_2015_2016,
city="R")
alahsa_deanship = Club.objects.get(english_name="Deanship of Student Affairs",
year=year_2015_2016,
city="A")
Club.objects.filter(english_name="Presidency (Al-Ahsa)",
year=year_2015_2016).update(parent=riyadh_presidency)
alahsa_deanship.delete()
class Migration(migrations.Migration):
dependencies = [
('clubs', '0041_rename_to_miskhat'),
]
operations = [
migrations.RunPython(
add_alahsa_deanship,
reverse_code=remove_alahsa_deanship),
]
|
|
415a6eed4aeb53c0400326e5a6258ddb2ff39402
|
csunplugged/tests/resources/utils/test_resource_parameters.py
|
csunplugged/tests/resources/utils/test_resource_parameters.py
|
class ResourceParametersTest(BaseTest):
def test_resource_parameter_base_process_requested_values(self):
pass
def test_resource_parameter_base_process_value(self):
pass
def test_resource_parameter_base_html_element(self):
pass
def test_single_valued_parameter_process_requested_values_required_present(self):
pass
def test_single_valued_parameter_process_requested_values_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_not_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_multi_valued_parameter_process_requested_values_single_value(self):
pass
def test_multi_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_enum_resource_parameter_html_element(self):
pass
def test_enum_resource_parameter_process_valid_value(self):
pass
def test_enum_resource_parameter_process_invalid_value(self):
pass
def test_enum_resource_parameter_process_missing_value_with_default(self):
pass
def test_enum_resource_parameter_process_missing_value_without_default(self):
pass
def test_enum_resource_parameter_index(self):
pass
def test_bool_resource_parameter_default_text(self):
pass
def test_bool_resource_parameter_custom_text(self):
pass
def test_bool_resource_parameter_process_value_valid(self):
pass
def test_bool_resource_parameter_process_value_invalid(self):
pass
def test_text_resource_parameter_html_element(self):
pass
def test_text_resource_parameter_html_element_with_placeholder(self):
pass
def test_integer_resource_parameter_html_element(self):
pass
def test_integer_resource_parameter_html_element_with_min_max_default(self):
pass
def test_integer_resource_parameter_process_value(self):
pass
def test_integer_resource_parameter_process_value_invalid_value_type(self):
pass
def test_integer_resource_parameter_process_value_out_of_range(self):
pass
|
Add test skeleton for resource_parameters unit tests
|
Add test skeleton for resource_parameters unit tests
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
Add test skeleton for resource_parameters unit tests
|
class ResourceParametersTest(BaseTest):
def test_resource_parameter_base_process_requested_values(self):
pass
def test_resource_parameter_base_process_value(self):
pass
def test_resource_parameter_base_html_element(self):
pass
def test_single_valued_parameter_process_requested_values_required_present(self):
pass
def test_single_valued_parameter_process_requested_values_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_not_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_multi_valued_parameter_process_requested_values_single_value(self):
pass
def test_multi_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_enum_resource_parameter_html_element(self):
pass
def test_enum_resource_parameter_process_valid_value(self):
pass
def test_enum_resource_parameter_process_invalid_value(self):
pass
def test_enum_resource_parameter_process_missing_value_with_default(self):
pass
def test_enum_resource_parameter_process_missing_value_without_default(self):
pass
def test_enum_resource_parameter_index(self):
pass
def test_bool_resource_parameter_default_text(self):
pass
def test_bool_resource_parameter_custom_text(self):
pass
def test_bool_resource_parameter_process_value_valid(self):
pass
def test_bool_resource_parameter_process_value_invalid(self):
pass
def test_text_resource_parameter_html_element(self):
pass
def test_text_resource_parameter_html_element_with_placeholder(self):
pass
def test_integer_resource_parameter_html_element(self):
pass
def test_integer_resource_parameter_html_element_with_min_max_default(self):
pass
def test_integer_resource_parameter_process_value(self):
pass
def test_integer_resource_parameter_process_value_invalid_value_type(self):
pass
def test_integer_resource_parameter_process_value_out_of_range(self):
pass
|
<commit_before><commit_msg>Add test skeleton for resource_parameters unit tests<commit_after>
|
class ResourceParametersTest(BaseTest):
def test_resource_parameter_base_process_requested_values(self):
pass
def test_resource_parameter_base_process_value(self):
pass
def test_resource_parameter_base_html_element(self):
pass
def test_single_valued_parameter_process_requested_values_required_present(self):
pass
def test_single_valued_parameter_process_requested_values_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_not_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_multi_valued_parameter_process_requested_values_single_value(self):
pass
def test_multi_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_enum_resource_parameter_html_element(self):
pass
def test_enum_resource_parameter_process_valid_value(self):
pass
def test_enum_resource_parameter_process_invalid_value(self):
pass
def test_enum_resource_parameter_process_missing_value_with_default(self):
pass
def test_enum_resource_parameter_process_missing_value_without_default(self):
pass
def test_enum_resource_parameter_index(self):
pass
def test_bool_resource_parameter_default_text(self):
pass
def test_bool_resource_parameter_custom_text(self):
pass
def test_bool_resource_parameter_process_value_valid(self):
pass
def test_bool_resource_parameter_process_value_invalid(self):
pass
def test_text_resource_parameter_html_element(self):
pass
def test_text_resource_parameter_html_element_with_placeholder(self):
pass
def test_integer_resource_parameter_html_element(self):
pass
def test_integer_resource_parameter_html_element_with_min_max_default(self):
pass
def test_integer_resource_parameter_process_value(self):
pass
def test_integer_resource_parameter_process_value_invalid_value_type(self):
pass
def test_integer_resource_parameter_process_value_out_of_range(self):
pass
|
Add test skeleton for resource_parameters unit testsclass ResourceParametersTest(BaseTest):
def test_resource_parameter_base_process_requested_values(self):
pass
def test_resource_parameter_base_process_value(self):
pass
def test_resource_parameter_base_html_element(self):
pass
def test_single_valued_parameter_process_requested_values_required_present(self):
pass
def test_single_valued_parameter_process_requested_values_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_not_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_multi_valued_parameter_process_requested_values_single_value(self):
pass
def test_multi_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_enum_resource_parameter_html_element(self):
pass
def test_enum_resource_parameter_process_valid_value(self):
pass
def test_enum_resource_parameter_process_invalid_value(self):
pass
def test_enum_resource_parameter_process_missing_value_with_default(self):
pass
def test_enum_resource_parameter_process_missing_value_without_default(self):
pass
def test_enum_resource_parameter_index(self):
pass
def test_bool_resource_parameter_default_text(self):
pass
def test_bool_resource_parameter_custom_text(self):
pass
def test_bool_resource_parameter_process_value_valid(self):
pass
def test_bool_resource_parameter_process_value_invalid(self):
pass
def test_text_resource_parameter_html_element(self):
pass
def test_text_resource_parameter_html_element_with_placeholder(self):
pass
def test_integer_resource_parameter_html_element(self):
pass
def test_integer_resource_parameter_html_element_with_min_max_default(self):
pass
def test_integer_resource_parameter_process_value(self):
pass
def test_integer_resource_parameter_process_value_invalid_value_type(self):
pass
def test_integer_resource_parameter_process_value_out_of_range(self):
pass
|
<commit_before><commit_msg>Add test skeleton for resource_parameters unit tests<commit_after>class ResourceParametersTest(BaseTest):
def test_resource_parameter_base_process_requested_values(self):
pass
def test_resource_parameter_base_process_value(self):
pass
def test_resource_parameter_base_html_element(self):
pass
def test_single_valued_parameter_process_requested_values_required_present(self):
pass
def test_single_valued_parameter_process_requested_values_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_not_required_missing(self):
pass
def test_single_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_multi_valued_parameter_process_requested_values_single_value(self):
pass
def test_multi_valued_parameter_process_requested_values_multiple_values(self):
pass
def test_enum_resource_parameter_html_element(self):
pass
def test_enum_resource_parameter_process_valid_value(self):
pass
def test_enum_resource_parameter_process_invalid_value(self):
pass
def test_enum_resource_parameter_process_missing_value_with_default(self):
pass
def test_enum_resource_parameter_process_missing_value_without_default(self):
pass
def test_enum_resource_parameter_index(self):
pass
def test_bool_resource_parameter_default_text(self):
pass
def test_bool_resource_parameter_custom_text(self):
pass
def test_bool_resource_parameter_process_value_valid(self):
pass
def test_bool_resource_parameter_process_value_invalid(self):
pass
def test_text_resource_parameter_html_element(self):
pass
def test_text_resource_parameter_html_element_with_placeholder(self):
pass
def test_integer_resource_parameter_html_element(self):
pass
def test_integer_resource_parameter_html_element_with_min_max_default(self):
pass
def test_integer_resource_parameter_process_value(self):
pass
def test_integer_resource_parameter_process_value_invalid_value_type(self):
pass
def test_integer_resource_parameter_process_value_out_of_range(self):
pass
|
|
e6807ad6d71e3b115828870bb068777ad865f329
|
tests/test_client.py
|
tests/test_client.py
|
import pylibmc
from pylibmc.test import make_test_client
from tests import PylibmcTestCase
from nose.tools import eq_, ok_
class ClientTests(PylibmcTestCase):
def test_zerokey(self):
bc = make_test_client(binary=True)
k = "\x00\x01"
ok_(bc.set(k, "test"))
rk = bc.get_multi([k]).keys()[0]
eq_(k, rk)
def test_cas(self):
k = "testkey"
mc = make_test_client(binary=False, behaviors={"cas": True})
ok_(mc.set(k, 0))
while True:
rv, cas = mc.gets(k)
ok_(mc.cas(k, rv + 1, cas))
if rv == 10:
break
|
Add tests for CAS and other things
|
Add tests for CAS and other things
Refs #63
|
Python
|
bsd-3-clause
|
lericson/pylibmc,lericson/pylibmc,lericson/pylibmc
|
Add tests for CAS and other things
Refs #63
|
import pylibmc
from pylibmc.test import make_test_client
from tests import PylibmcTestCase
from nose.tools import eq_, ok_
class ClientTests(PylibmcTestCase):
def test_zerokey(self):
bc = make_test_client(binary=True)
k = "\x00\x01"
ok_(bc.set(k, "test"))
rk = bc.get_multi([k]).keys()[0]
eq_(k, rk)
def test_cas(self):
k = "testkey"
mc = make_test_client(binary=False, behaviors={"cas": True})
ok_(mc.set(k, 0))
while True:
rv, cas = mc.gets(k)
ok_(mc.cas(k, rv + 1, cas))
if rv == 10:
break
|
<commit_before><commit_msg>Add tests for CAS and other things
Refs #63<commit_after>
|
import pylibmc
from pylibmc.test import make_test_client
from tests import PylibmcTestCase
from nose.tools import eq_, ok_
class ClientTests(PylibmcTestCase):
def test_zerokey(self):
bc = make_test_client(binary=True)
k = "\x00\x01"
ok_(bc.set(k, "test"))
rk = bc.get_multi([k]).keys()[0]
eq_(k, rk)
def test_cas(self):
k = "testkey"
mc = make_test_client(binary=False, behaviors={"cas": True})
ok_(mc.set(k, 0))
while True:
rv, cas = mc.gets(k)
ok_(mc.cas(k, rv + 1, cas))
if rv == 10:
break
|
Add tests for CAS and other things
Refs #63import pylibmc
from pylibmc.test import make_test_client
from tests import PylibmcTestCase
from nose.tools import eq_, ok_
class ClientTests(PylibmcTestCase):
def test_zerokey(self):
bc = make_test_client(binary=True)
k = "\x00\x01"
ok_(bc.set(k, "test"))
rk = bc.get_multi([k]).keys()[0]
eq_(k, rk)
def test_cas(self):
k = "testkey"
mc = make_test_client(binary=False, behaviors={"cas": True})
ok_(mc.set(k, 0))
while True:
rv, cas = mc.gets(k)
ok_(mc.cas(k, rv + 1, cas))
if rv == 10:
break
|
<commit_before><commit_msg>Add tests for CAS and other things
Refs #63<commit_after>import pylibmc
from pylibmc.test import make_test_client
from tests import PylibmcTestCase
from nose.tools import eq_, ok_
class ClientTests(PylibmcTestCase):
def test_zerokey(self):
bc = make_test_client(binary=True)
k = "\x00\x01"
ok_(bc.set(k, "test"))
rk = bc.get_multi([k]).keys()[0]
eq_(k, rk)
def test_cas(self):
k = "testkey"
mc = make_test_client(binary=False, behaviors={"cas": True})
ok_(mc.set(k, 0))
while True:
rv, cas = mc.gets(k)
ok_(mc.cas(k, rv + 1, cas))
if rv == 10:
break
|
|
60d8413940119c64db89ded7854850912947e135
|
var/spack/packages/mbedtls/package.py
|
var/spack/packages/mbedtls/package.py
|
from spack import *
class Mbedtls(Package):
"""
mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
"""
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
make()
make("install")
|
Support mbedtls, an alternative SSL library
|
Support mbedtls, an alternative SSL library
|
Python
|
lgpl-2.1
|
lgarren/spack,LLNL/spack,tmerrick1/spack,LLNL/spack,TheTimmy/spack,mfherbst/spack,krafczyk/spack,LLNL/spack,iulian787/spack,skosukhin/spack,krafczyk/spack,TheTimmy/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,lgarren/spack,TheTimmy/spack,EmreAtes/spack,TheTimmy/spack,EmreAtes/spack,iulian787/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,skosukhin/spack,iulian787/spack,lgarren/spack,skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,EmreAtes/spack,LLNL/spack,mfherbst/spack,LLNL/spack,lgarren/spack,skosukhin/spack,tmerrick1/spack,tmerrick1/spack,mfherbst/spack,EmreAtes/spack,lgarren/spack,tmerrick1/spack,skosukhin/spack,krafczyk/spack,matthiasdiener/spack
|
Support mbedtls, an alternative SSL library
|
from spack import *
class Mbedtls(Package):
"""
mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
"""
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
make()
make("install")
|
<commit_before><commit_msg>Support mbedtls, an alternative SSL library<commit_after>
|
from spack import *
class Mbedtls(Package):
"""
mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
"""
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
make()
make("install")
|
Support mbedtls, an alternative SSL libraryfrom spack import *
class Mbedtls(Package):
"""
mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
"""
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
make()
make("install")
|
<commit_before><commit_msg>Support mbedtls, an alternative SSL library<commit_after>from spack import *
class Mbedtls(Package):
"""
mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
"""
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
make()
make("install")
|
|
5c8727c31f3ce628c09e49e10c23b8b67c1dcabe
|
api/tests/test_small_scripts.py
|
api/tests/test_small_scripts.py
|
"""This module tests the small scripts - admin, model, and wsgi."""
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
import api.admin
def test_models(self):
import api.models
def test_wsgi(self):
import apel_rest.wsgi
|
Add tests for small script files
|
Add tests for small script files
|
Python
|
apache-2.0
|
apel/rest,apel/rest
|
Add tests for small script files
|
"""This module tests the small scripts - admin, model, and wsgi."""
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
import api.admin
def test_models(self):
import api.models
def test_wsgi(self):
import apel_rest.wsgi
|
<commit_before><commit_msg>Add tests for small script files<commit_after>
|
"""This module tests the small scripts - admin, model, and wsgi."""
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
import api.admin
def test_models(self):
import api.models
def test_wsgi(self):
import apel_rest.wsgi
|
Add tests for small script files"""This module tests the small scripts - admin, model, and wsgi."""
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
import api.admin
def test_models(self):
import api.models
def test_wsgi(self):
import apel_rest.wsgi
|
<commit_before><commit_msg>Add tests for small script files<commit_after>"""This module tests the small scripts - admin, model, and wsgi."""
import unittest
class SmallScriptsTest(unittest.TestCase):
def test_admin(self):
import api.admin
def test_models(self):
import api.models
def test_wsgi(self):
import apel_rest.wsgi
|
|
d80a0b0f9d9bf9a3c82695fd2565d22e54d5f4ee
|
exp/viroscopy/test/HIVGraphReaderTest.py
|
exp/viroscopy/test/HIVGraphReaderTest.py
|
import apgl
import numpy
import unittest
import pickle
import numpy.testing as nptst
from exp.viroscopy.HIVGraphReader import HIVGraphReader
class HIVGraphReaderTest(unittest.TestCase):
def setup(self):
pass
def testreadSimulationHIVGraph(self):
hivReader = HIVGraphReader()
graph = hivReader.readSimulationHIVGraph()
print(graph)
#TODO: Test this in much more detail
if __name__ == '__main__':
unittest.main()
|
Test of simulated graph reading.
|
Test of simulated graph reading.
|
Python
|
bsd-3-clause
|
charanpald/APGL
|
Test of simulated graph reading.
|
import apgl
import numpy
import unittest
import pickle
import numpy.testing as nptst
from exp.viroscopy.HIVGraphReader import HIVGraphReader
class HIVGraphReaderTest(unittest.TestCase):
def setup(self):
pass
def testreadSimulationHIVGraph(self):
hivReader = HIVGraphReader()
graph = hivReader.readSimulationHIVGraph()
print(graph)
#TODO: Test this in much more detail
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test of simulated graph reading. <commit_after>
|
import apgl
import numpy
import unittest
import pickle
import numpy.testing as nptst
from exp.viroscopy.HIVGraphReader import HIVGraphReader
class HIVGraphReaderTest(unittest.TestCase):
def setup(self):
pass
def testreadSimulationHIVGraph(self):
hivReader = HIVGraphReader()
graph = hivReader.readSimulationHIVGraph()
print(graph)
#TODO: Test this in much more detail
if __name__ == '__main__':
unittest.main()
|
Test of simulated graph reading.
import apgl
import numpy
import unittest
import pickle
import numpy.testing as nptst
from exp.viroscopy.HIVGraphReader import HIVGraphReader
class HIVGraphReaderTest(unittest.TestCase):
def setup(self):
pass
def testreadSimulationHIVGraph(self):
hivReader = HIVGraphReader()
graph = hivReader.readSimulationHIVGraph()
print(graph)
#TODO: Test this in much more detail
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Test of simulated graph reading. <commit_after>
import apgl
import numpy
import unittest
import pickle
import numpy.testing as nptst
from exp.viroscopy.HIVGraphReader import HIVGraphReader
class HIVGraphReaderTest(unittest.TestCase):
def setup(self):
pass
def testreadSimulationHIVGraph(self):
hivReader = HIVGraphReader()
graph = hivReader.readSimulationHIVGraph()
print(graph)
#TODO: Test this in much more detail
if __name__ == '__main__':
unittest.main()
|
|
4ce9ec033ba13c3345c951a626510f0bd90b26d9
|
monitor.py
|
monitor.py
|
#!/usr/bin/python3
import smbus
import time
import sys
# Get I2C bus
bus = smbus.SMBus(1)
# number of channels the monitoring board has
no_of_channels = 1
# PECMAC125A address, 0x2A(42)
# Command for reading current
# 0x6A(106), 0x01(1), 0x01(1),0x0C(12), 0x00(0), 0x00(0) 0x0A(10)
# Header byte-2, command-1, start channel-1, stop channel-12, byte 5 and 6 reserved, checksum
# ControlEverying.com has a Current Monitoring Reference Guide on their website
# for the board. Instructions are there on how to calculate the checksum
read_power_flow_command = [0x6A, 0x01, 0x01, 0x00, 0x00, 0xFF]
# number of times to read the current
read_times = 10
# seconds to wait between each reading
wait_time = 15
# voltage of the circuit
voltage = 220
for x in range(0, read_times):
print(x)
try:
bus.write_i2c_block_data(0x2A, 0x92, read_power_flow_command)
time.sleep(0.5)
data = bus.read_i2c_block_data(0x2A, 0x55, 4)
# convert the data
msb1 = data[0]
msb = data[1]
lsb = data[2]
amps = (msb1 * 65536 + msb * 256 + lsb) / 1000.0
watts = amps * voltage
print("Current amps : %.3f A" %amps)
print("Current watts : %.3f w" %watts)
time.sleep(wait_time)
# kept having errors with either reading or writing the data
# this error handling just skips that attempt at reading the data
# and continues on to the next one.
except IOError:
print("Error reading or writing to the bus.")
|
Read the data and output to terminal
|
Read the data and output to terminal
|
Python
|
mit
|
jeffdirks/electricity_calc
|
Read the data and output to terminal
|
#!/usr/bin/python3
import smbus
import time
import sys
# Get I2C bus
bus = smbus.SMBus(1)
# number of channels the monitoring board has
no_of_channels = 1
# PECMAC125A address, 0x2A(42)
# Command for reading current
# 0x6A(106), 0x01(1), 0x01(1),0x0C(12), 0x00(0), 0x00(0) 0x0A(10)
# Header byte-2, command-1, start channel-1, stop channel-12, byte 5 and 6 reserved, checksum
# ControlEverying.com has a Current Monitoring Reference Guide on their website
# for the board. Instructions are there on how to calculate the checksum
read_power_flow_command = [0x6A, 0x01, 0x01, 0x00, 0x00, 0xFF]
# number of times to read the current
read_times = 10
# seconds to wait between each reading
wait_time = 15
# voltage of the circuit
voltage = 220
for x in range(0, read_times):
print(x)
try:
bus.write_i2c_block_data(0x2A, 0x92, read_power_flow_command)
time.sleep(0.5)
data = bus.read_i2c_block_data(0x2A, 0x55, 4)
# convert the data
msb1 = data[0]
msb = data[1]
lsb = data[2]
amps = (msb1 * 65536 + msb * 256 + lsb) / 1000.0
watts = amps * voltage
print("Current amps : %.3f A" %amps)
print("Current watts : %.3f w" %watts)
time.sleep(wait_time)
# kept having errors with either reading or writing the data
# this error handling just skips that attempt at reading the data
# and continues on to the next one.
except IOError:
print("Error reading or writing to the bus.")
|
<commit_before><commit_msg>Read the data and output to terminal<commit_after>
|
#!/usr/bin/python3
import smbus
import time
import sys
# Get I2C bus
bus = smbus.SMBus(1)
# number of channels the monitoring board has
no_of_channels = 1
# PECMAC125A address, 0x2A(42)
# Command for reading current
# 0x6A(106), 0x01(1), 0x01(1),0x0C(12), 0x00(0), 0x00(0) 0x0A(10)
# Header byte-2, command-1, start channel-1, stop channel-12, byte 5 and 6 reserved, checksum
# ControlEverying.com has a Current Monitoring Reference Guide on their website
# for the board. Instructions are there on how to calculate the checksum
read_power_flow_command = [0x6A, 0x01, 0x01, 0x00, 0x00, 0xFF]
# number of times to read the current
read_times = 10
# seconds to wait between each reading
wait_time = 15
# voltage of the circuit
voltage = 220
for x in range(0, read_times):
print(x)
try:
bus.write_i2c_block_data(0x2A, 0x92, read_power_flow_command)
time.sleep(0.5)
data = bus.read_i2c_block_data(0x2A, 0x55, 4)
# convert the data
msb1 = data[0]
msb = data[1]
lsb = data[2]
amps = (msb1 * 65536 + msb * 256 + lsb) / 1000.0
watts = amps * voltage
print("Current amps : %.3f A" %amps)
print("Current watts : %.3f w" %watts)
time.sleep(wait_time)
# kept having errors with either reading or writing the data
# this error handling just skips that attempt at reading the data
# and continues on to the next one.
except IOError:
print("Error reading or writing to the bus.")
|
Read the data and output to terminal#!/usr/bin/python3
import smbus
import time
import sys
# Get I2C bus
bus = smbus.SMBus(1)
# number of channels the monitoring board has
no_of_channels = 1
# PECMAC125A address, 0x2A(42)
# Command for reading current
# 0x6A(106), 0x01(1), 0x01(1),0x0C(12), 0x00(0), 0x00(0) 0x0A(10)
# Header byte-2, command-1, start channel-1, stop channel-12, byte 5 and 6 reserved, checksum
# ControlEverying.com has a Current Monitoring Reference Guide on their website
# for the board. Instructions are there on how to calculate the checksum
read_power_flow_command = [0x6A, 0x01, 0x01, 0x00, 0x00, 0xFF]
# number of times to read the current
read_times = 10
# seconds to wait between each reading
wait_time = 15
# voltage of the circuit
voltage = 220
for x in range(0, read_times):
print(x)
try:
bus.write_i2c_block_data(0x2A, 0x92, read_power_flow_command)
time.sleep(0.5)
data = bus.read_i2c_block_data(0x2A, 0x55, 4)
# convert the data
msb1 = data[0]
msb = data[1]
lsb = data[2]
amps = (msb1 * 65536 + msb * 256 + lsb) / 1000.0
watts = amps * voltage
print("Current amps : %.3f A" %amps)
print("Current watts : %.3f w" %watts)
time.sleep(wait_time)
# kept having errors with either reading or writing the data
# this error handling just skips that attempt at reading the data
# and continues on to the next one.
except IOError:
print("Error reading or writing to the bus.")
|
<commit_before><commit_msg>Read the data and output to terminal<commit_after>#!/usr/bin/python3
import smbus
import time
import sys
# Get I2C bus
bus = smbus.SMBus(1)
# number of channels the monitoring board has
no_of_channels = 1
# PECMAC125A address, 0x2A(42)
# Command for reading current
# 0x6A(106), 0x01(1), 0x01(1),0x0C(12), 0x00(0), 0x00(0) 0x0A(10)
# Header byte-2, command-1, start channel-1, stop channel-12, byte 5 and 6 reserved, checksum
# ControlEverying.com has a Current Monitoring Reference Guide on their website
# for the board. Instructions are there on how to calculate the checksum
read_power_flow_command = [0x6A, 0x01, 0x01, 0x00, 0x00, 0xFF]
# number of times to read the current
read_times = 10
# seconds to wait between each reading
wait_time = 15
# voltage of the circuit
voltage = 220
for x in range(0, read_times):
print(x)
try:
bus.write_i2c_block_data(0x2A, 0x92, read_power_flow_command)
time.sleep(0.5)
data = bus.read_i2c_block_data(0x2A, 0x55, 4)
# convert the data
msb1 = data[0]
msb = data[1]
lsb = data[2]
amps = (msb1 * 65536 + msb * 256 + lsb) / 1000.0
watts = amps * voltage
print("Current amps : %.3f A" %amps)
print("Current watts : %.3f w" %watts)
time.sleep(wait_time)
# kept having errors with either reading or writing the data
# this error handling just skips that attempt at reading the data
# and continues on to the next one.
except IOError:
print("Error reading or writing to the bus.")
|
|
4fde76e19df9cb7ac0d7c3b763dc43b9af85a022
|
tsne/tests/test_iris.py
|
tsne/tests/test_iris.py
|
def test_iris():
from tsne import bh_sne
from sklearn.datasets import load_iris
iris = load_iris()
X = iris.data
y = iris.target
X_2d = bh_sne(X)
|
Add test for iris, same as example notebook
|
Add test for iris, same as example notebook
|
Python
|
bsd-3-clause
|
pryvkin10x/tsne,pryvkin10x/tsne,pryvkin10x/tsne
|
Add test for iris, same as example notebook
|
def test_iris():
from tsne import bh_sne
from sklearn.datasets import load_iris
iris = load_iris()
X = iris.data
y = iris.target
X_2d = bh_sne(X)
|
<commit_before><commit_msg>Add test for iris, same as example notebook<commit_after>
|
def test_iris():
from tsne import bh_sne
from sklearn.datasets import load_iris
iris = load_iris()
X = iris.data
y = iris.target
X_2d = bh_sne(X)
|
Add test for iris, same as example notebook
def test_iris():
from tsne import bh_sne
from sklearn.datasets import load_iris
iris = load_iris()
X = iris.data
y = iris.target
X_2d = bh_sne(X)
|
<commit_before><commit_msg>Add test for iris, same as example notebook<commit_after>
def test_iris():
from tsne import bh_sne
from sklearn.datasets import load_iris
iris = load_iris()
X = iris.data
y = iris.target
X_2d = bh_sne(X)
|
|
516c0a7f2b48720445b8b699644d7a33a73481e6
|
ini_tools/get_ini_fields.py
|
ini_tools/get_ini_fields.py
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
|
Add collect ini field script.
|
Add collect ini field script.
|
Python
|
cc0-1.0
|
haoNoQ/wztools2100,haoNoQ/wztools2100,haoNoQ/wztools2100
|
Add collect ini field script.
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
|
<commit_before><commit_msg>Add collect ini field script.<commit_after>
|
"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
|
Add collect ini field script."""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
|
<commit_before><commit_msg>Add collect ini field script.<commit_after>"""
collect information about fields and values in ini file
usage run script with file name in directory with unpacked stats.
Script will collect data from all files with name.
You can specify path as second argument.
python get_ini_fields.py body.ini
python get_ini_fields.py body.ini "C:/games/warzone2100"
"""
import os
import sys
from config_parser import WZConfigParser
def get_ini_fields(fields, path):
cp = WZConfigParser()
cp.load(path)
for section in cp.sections():
for key, value in cp.items(section):
fields.setdefault(key, set()).add(value)
if __name__ == "__main__":
name = sys.argv[1]
path = sys.argv[2]
fields = {}
for base, dirs, files in os.walk(path):
if name in files:
file_path = os.path.join(base, name)
get_ini_fields(fields, file_path)
print "collectiong data from", file_path
for field, values in fields.items():
print field, ' '.join(sorted(values))
|
|
e38b98a44d5961439e9c60e9398f183dc6dbf39e
|
scripts/profileshader.py
|
scripts/profileshader.py
|
#!/usr/bin/env python
##########################################################################
#
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
import sys
if len(sys.argv) <= 1:
print 'Please specify a file to read'
sys.exit()
shaderTimes = {}
activeShader = 0
for line in open(sys.argv[1], 'r'):
words = line.split(' ')
if line.startswith('#'):
continue
if words[0:3] == ['use','shader','program']:
activeShader = long(words[3])
elif words[0] == 'call':
id = long(words[1])
func = words[-1]
duration = long(words[3])
if shaderTimes.has_key(activeShader):
shaderTimes[activeShader]['draws'] += 1
shaderTimes[activeShader]['duration'] += duration
if duration > shaderTimes[activeShader]['longestDuration']:
shaderTimes[activeShader]['longest'] = id
shaderTimes[activeShader]['longestDuration'] = duration
else:
shaderTimes[activeShader] = {'draws': 1, 'duration': duration, 'longest': id, 'longestDuration': duration}
sortedShaderTimes = sorted(shaderTimes.items(), key=lambda x: x[1]['duration'], reverse=True)
print '+------------+--------------+--------------------+--------------+-------------+'
print '| Shader[id] | Draws [#] | Duration [ns] v | Per Call[ns] | Longest[id] |'
print '+------------+--------------+--------------------+--------------+-------------+'
for shader in sortedShaderTimes:
id = str(shader[0]).rjust(10)
draw = str(shader[1]['draws']).rjust(12)
dura = str(shader[1]['duration']).rjust(18)
perCall = str(shader[1]['duration'] / shader[1]['draws']).rjust(12)
longest = str(shader[1]['longest']).rjust(11)
print "| %s | %s | %s | %s | %s |" % (id, draw, dura, perCall, longest)
print '+------------+--------------+--------------------+--------------+-------------+'
|
Add a script to analyse profile output per shader.
|
Add a script to analyse profile output per shader.
|
Python
|
mit
|
surround-io/apitrace,joshua5201/apitrace,tuanthng/apitrace,surround-io/apitrace,apitrace/apitrace,PeterLValve/apitrace,tuanthng/apitrace,PeterLValve/apitrace,swq0553/apitrace,swq0553/apitrace,tuanthng/apitrace,schulmar/apitrace,trtt/apitrace,surround-io/apitrace,EoD/apitrace,trtt/apitrace,tuanthng/apitrace,swq0553/apitrace,swq0553/apitrace,joshua5201/apitrace,joshua5201/apitrace,EoD/apitrace,surround-io/apitrace,joshua5201/apitrace,apitrace/apitrace,swq0553/apitrace,trtt/apitrace,EoD/apitrace,EoD/apitrace,schulmar/apitrace,apitrace/apitrace,PeterLValve/apitrace,schulmar/apitrace,schulmar/apitrace,trtt/apitrace,joshua5201/apitrace,surround-io/apitrace,schulmar/apitrace,EoD/apitrace,apitrace/apitrace,tuanthng/apitrace,PeterLValve/apitrace,trtt/apitrace
|
Add a script to analyse profile output per shader.
|
#!/usr/bin/env python
##########################################################################
#
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
import sys
if len(sys.argv) <= 1:
print 'Please specify a file to read'
sys.exit()
shaderTimes = {}
activeShader = 0
for line in open(sys.argv[1], 'r'):
words = line.split(' ')
if line.startswith('#'):
continue
if words[0:3] == ['use','shader','program']:
activeShader = long(words[3])
elif words[0] == 'call':
id = long(words[1])
func = words[-1]
duration = long(words[3])
if shaderTimes.has_key(activeShader):
shaderTimes[activeShader]['draws'] += 1
shaderTimes[activeShader]['duration'] += duration
if duration > shaderTimes[activeShader]['longestDuration']:
shaderTimes[activeShader]['longest'] = id
shaderTimes[activeShader]['longestDuration'] = duration
else:
shaderTimes[activeShader] = {'draws': 1, 'duration': duration, 'longest': id, 'longestDuration': duration}
sortedShaderTimes = sorted(shaderTimes.items(), key=lambda x: x[1]['duration'], reverse=True)
print '+------------+--------------+--------------------+--------------+-------------+'
print '| Shader[id] | Draws [#] | Duration [ns] v | Per Call[ns] | Longest[id] |'
print '+------------+--------------+--------------------+--------------+-------------+'
for shader in sortedShaderTimes:
id = str(shader[0]).rjust(10)
draw = str(shader[1]['draws']).rjust(12)
dura = str(shader[1]['duration']).rjust(18)
perCall = str(shader[1]['duration'] / shader[1]['draws']).rjust(12)
longest = str(shader[1]['longest']).rjust(11)
print "| %s | %s | %s | %s | %s |" % (id, draw, dura, perCall, longest)
print '+------------+--------------+--------------------+--------------+-------------+'
|
<commit_before><commit_msg>Add a script to analyse profile output per shader.<commit_after>
|
#!/usr/bin/env python
##########################################################################
#
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
import sys
if len(sys.argv) <= 1:
print 'Please specify a file to read'
sys.exit()
shaderTimes = {}
activeShader = 0
for line in open(sys.argv[1], 'r'):
words = line.split(' ')
if line.startswith('#'):
continue
if words[0:3] == ['use','shader','program']:
activeShader = long(words[3])
elif words[0] == 'call':
id = long(words[1])
func = words[-1]
duration = long(words[3])
if shaderTimes.has_key(activeShader):
shaderTimes[activeShader]['draws'] += 1
shaderTimes[activeShader]['duration'] += duration
if duration > shaderTimes[activeShader]['longestDuration']:
shaderTimes[activeShader]['longest'] = id
shaderTimes[activeShader]['longestDuration'] = duration
else:
shaderTimes[activeShader] = {'draws': 1, 'duration': duration, 'longest': id, 'longestDuration': duration}
sortedShaderTimes = sorted(shaderTimes.items(), key=lambda x: x[1]['duration'], reverse=True)
print '+------------+--------------+--------------------+--------------+-------------+'
print '| Shader[id] | Draws [#] | Duration [ns] v | Per Call[ns] | Longest[id] |'
print '+------------+--------------+--------------------+--------------+-------------+'
for shader in sortedShaderTimes:
id = str(shader[0]).rjust(10)
draw = str(shader[1]['draws']).rjust(12)
dura = str(shader[1]['duration']).rjust(18)
perCall = str(shader[1]['duration'] / shader[1]['draws']).rjust(12)
longest = str(shader[1]['longest']).rjust(11)
print "| %s | %s | %s | %s | %s |" % (id, draw, dura, perCall, longest)
print '+------------+--------------+--------------------+--------------+-------------+'
|
Add a script to analyse profile output per shader.#!/usr/bin/env python
##########################################################################
#
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
import sys
if len(sys.argv) <= 1:
print 'Please specify a file to read'
sys.exit()
shaderTimes = {}
activeShader = 0
for line in open(sys.argv[1], 'r'):
words = line.split(' ')
if line.startswith('#'):
continue
if words[0:3] == ['use','shader','program']:
activeShader = long(words[3])
elif words[0] == 'call':
id = long(words[1])
func = words[-1]
duration = long(words[3])
if shaderTimes.has_key(activeShader):
shaderTimes[activeShader]['draws'] += 1
shaderTimes[activeShader]['duration'] += duration
if duration > shaderTimes[activeShader]['longestDuration']:
shaderTimes[activeShader]['longest'] = id
shaderTimes[activeShader]['longestDuration'] = duration
else:
shaderTimes[activeShader] = {'draws': 1, 'duration': duration, 'longest': id, 'longestDuration': duration}
sortedShaderTimes = sorted(shaderTimes.items(), key=lambda x: x[1]['duration'], reverse=True)
print '+------------+--------------+--------------------+--------------+-------------+'
print '| Shader[id] | Draws [#] | Duration [ns] v | Per Call[ns] | Longest[id] |'
print '+------------+--------------+--------------------+--------------+-------------+'
for shader in sortedShaderTimes:
id = str(shader[0]).rjust(10)
draw = str(shader[1]['draws']).rjust(12)
dura = str(shader[1]['duration']).rjust(18)
perCall = str(shader[1]['duration'] / shader[1]['draws']).rjust(12)
longest = str(shader[1]['longest']).rjust(11)
print "| %s | %s | %s | %s | %s |" % (id, draw, dura, perCall, longest)
print '+------------+--------------+--------------------+--------------+-------------+'
|
<commit_before><commit_msg>Add a script to analyse profile output per shader.<commit_after>#!/usr/bin/env python
##########################################################################
#
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
import sys
if len(sys.argv) <= 1:
print 'Please specify a file to read'
sys.exit()
shaderTimes = {}
activeShader = 0
for line in open(sys.argv[1], 'r'):
words = line.split(' ')
if line.startswith('#'):
continue
if words[0:3] == ['use','shader','program']:
activeShader = long(words[3])
elif words[0] == 'call':
id = long(words[1])
func = words[-1]
duration = long(words[3])
if shaderTimes.has_key(activeShader):
shaderTimes[activeShader]['draws'] += 1
shaderTimes[activeShader]['duration'] += duration
if duration > shaderTimes[activeShader]['longestDuration']:
shaderTimes[activeShader]['longest'] = id
shaderTimes[activeShader]['longestDuration'] = duration
else:
shaderTimes[activeShader] = {'draws': 1, 'duration': duration, 'longest': id, 'longestDuration': duration}
sortedShaderTimes = sorted(shaderTimes.items(), key=lambda x: x[1]['duration'], reverse=True)
print '+------------+--------------+--------------------+--------------+-------------+'
print '| Shader[id] | Draws [#] | Duration [ns] v | Per Call[ns] | Longest[id] |'
print '+------------+--------------+--------------------+--------------+-------------+'
for shader in sortedShaderTimes:
id = str(shader[0]).rjust(10)
draw = str(shader[1]['draws']).rjust(12)
dura = str(shader[1]['duration']).rjust(18)
perCall = str(shader[1]['duration'] / shader[1]['draws']).rjust(12)
longest = str(shader[1]['longest']).rjust(11)
print "| %s | %s | %s | %s | %s |" % (id, draw, dura, perCall, longest)
print '+------------+--------------+--------------------+--------------+-------------+'
|
|
632c29adc4a57c653266c5f3a319f5b761f9a0c4
|
nototools/decompose_ttc.py
|
nototools/decompose_ttc.py
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decompose a TTC file to its pieces."""
__author__ = 'roozbeh@google.com (Roozbeh Pournader)'
import sys
from fontTools import ttLib
from fontTools.ttLib import sfnt
def main(argv):
"""Decompose all fonts provided in the command line."""
for font_file_name in argv[1:]:
with open(font_file_name, 'rb') as font_file:
font = sfnt.SFNTReader(font_file, fontNumber=0)
num_fonts = font.numFonts
for font_number in range(num_fonts):
font = ttLib.TTFont(font_file_name, fontNumber=font_number)
font.save('%s-part%d' % (font_file_name, font_number))
if __name__ == '__main__':
main(sys.argv)
|
Add a tool to decompose TTC files.
|
Add a tool to decompose TTC files.
|
Python
|
apache-2.0
|
wskplho/noto-monolithic,yannisl/noto-monolithic,wskplho/noto-monolithic,yannisl/noto-monolithic,yannisl/noto-monolithic,yannisl/noto-monolithic,wskplho/noto-monolithic,wskplho/noto-monolithic
|
Add a tool to decompose TTC files.
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decompose a TTC file to its pieces."""
__author__ = 'roozbeh@google.com (Roozbeh Pournader)'
import sys
from fontTools import ttLib
from fontTools.ttLib import sfnt
def main(argv):
"""Decompose all fonts provided in the command line."""
for font_file_name in argv[1:]:
with open(font_file_name, 'rb') as font_file:
font = sfnt.SFNTReader(font_file, fontNumber=0)
num_fonts = font.numFonts
for font_number in range(num_fonts):
font = ttLib.TTFont(font_file_name, fontNumber=font_number)
font.save('%s-part%d' % (font_file_name, font_number))
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add a tool to decompose TTC files.<commit_after>
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decompose a TTC file to its pieces."""
__author__ = 'roozbeh@google.com (Roozbeh Pournader)'
import sys
from fontTools import ttLib
from fontTools.ttLib import sfnt
def main(argv):
"""Decompose all fonts provided in the command line."""
for font_file_name in argv[1:]:
with open(font_file_name, 'rb') as font_file:
font = sfnt.SFNTReader(font_file, fontNumber=0)
num_fonts = font.numFonts
for font_number in range(num_fonts):
font = ttLib.TTFont(font_file_name, fontNumber=font_number)
font.save('%s-part%d' % (font_file_name, font_number))
if __name__ == '__main__':
main(sys.argv)
|
Add a tool to decompose TTC files.#!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decompose a TTC file to its pieces."""
__author__ = 'roozbeh@google.com (Roozbeh Pournader)'
import sys
from fontTools import ttLib
from fontTools.ttLib import sfnt
def main(argv):
"""Decompose all fonts provided in the command line."""
for font_file_name in argv[1:]:
with open(font_file_name, 'rb') as font_file:
font = sfnt.SFNTReader(font_file, fontNumber=0)
num_fonts = font.numFonts
for font_number in range(num_fonts):
font = ttLib.TTFont(font_file_name, fontNumber=font_number)
font.save('%s-part%d' % (font_file_name, font_number))
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add a tool to decompose TTC files.<commit_after>#!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decompose a TTC file to its pieces."""
__author__ = 'roozbeh@google.com (Roozbeh Pournader)'
import sys
from fontTools import ttLib
from fontTools.ttLib import sfnt
def main(argv):
"""Decompose all fonts provided in the command line."""
for font_file_name in argv[1:]:
with open(font_file_name, 'rb') as font_file:
font = sfnt.SFNTReader(font_file, fontNumber=0)
num_fonts = font.numFonts
for font_number in range(num_fonts):
font = ttLib.TTFont(font_file_name, fontNumber=font_number)
font.save('%s-part%d' % (font_file_name, font_number))
if __name__ == '__main__':
main(sys.argv)
|
|
b368a24f0ab2abd49957d77c0013e1ed40cf1f42
|
cl/visualizations/migrations/0011_auto_20151203_1631.py
|
cl/visualizations/migrations/0011_auto_20151203_1631.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('visualizations', '0010_auto_20151125_1041'),
]
operations = [
migrations.AlterField(
model_name='scotusmap',
name='date_published',
field=models.DateTimeField(help_text=b'The moment when the visualization was first shared', null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='scotusmap',
name='published',
field=models.BooleanField(default=False, help_text=b'Whether the visualization has been shared.'),
),
]
|
Update the DB with the new descriptions.
|
Update the DB with the new descriptions.
This shouldn't be necessary.
|
Python
|
agpl-3.0
|
voutilad/courtlistener,Andr3iC/courtlistener,Andr3iC/courtlistener,voutilad/courtlistener,voutilad/courtlistener,brianwc/courtlistener,brianwc/courtlistener,Andr3iC/courtlistener,voutilad/courtlistener,Andr3iC/courtlistener,voutilad/courtlistener,brianwc/courtlistener,Andr3iC/courtlistener,brianwc/courtlistener,brianwc/courtlistener
|
Update the DB with the new descriptions.
This shouldn't be necessary.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('visualizations', '0010_auto_20151125_1041'),
]
operations = [
migrations.AlterField(
model_name='scotusmap',
name='date_published',
field=models.DateTimeField(help_text=b'The moment when the visualization was first shared', null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='scotusmap',
name='published',
field=models.BooleanField(default=False, help_text=b'Whether the visualization has been shared.'),
),
]
|
<commit_before><commit_msg>Update the DB with the new descriptions.
This shouldn't be necessary.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('visualizations', '0010_auto_20151125_1041'),
]
operations = [
migrations.AlterField(
model_name='scotusmap',
name='date_published',
field=models.DateTimeField(help_text=b'The moment when the visualization was first shared', null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='scotusmap',
name='published',
field=models.BooleanField(default=False, help_text=b'Whether the visualization has been shared.'),
),
]
|
Update the DB with the new descriptions.
This shouldn't be necessary.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('visualizations', '0010_auto_20151125_1041'),
]
operations = [
migrations.AlterField(
model_name='scotusmap',
name='date_published',
field=models.DateTimeField(help_text=b'The moment when the visualization was first shared', null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='scotusmap',
name='published',
field=models.BooleanField(default=False, help_text=b'Whether the visualization has been shared.'),
),
]
|
<commit_before><commit_msg>Update the DB with the new descriptions.
This shouldn't be necessary.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('visualizations', '0010_auto_20151125_1041'),
]
operations = [
migrations.AlterField(
model_name='scotusmap',
name='date_published',
field=models.DateTimeField(help_text=b'The moment when the visualization was first shared', null=True, db_index=True, blank=True),
),
migrations.AlterField(
model_name='scotusmap',
name='published',
field=models.BooleanField(default=False, help_text=b'Whether the visualization has been shared.'),
),
]
|
|
f08dc42802ce0442744f250c4f329db5cc7bae03
|
cms/djangoapps/contentstore/management/commands/tests/test_migrate_to_split.py
|
cms/djangoapps/contentstore/management/commands/tests/test_migrate_to_split.py
|
"""
Unittests for importing a course via management command
"""
import unittest
from django.core.management import CommandError
from contentstore.management.commands.migrate_to_split import Command
class TestArgParsing(unittest.TestCase):
def setUp(self):
self.command = Command()
def test_no_args(self):
errstring = "migrate_to_split requires at least two arguments"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle()
def test_invalid_location(self):
errstring = "Invalid location string"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("foo", "bar")
def test_nonexistant_user_id(self):
errstring = "No user exists with ID 99"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "99")
def test_nonexistant_user_email(self):
errstring = "No user exists with email fake@example.com"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "fake@example.com")
|
Add some dummy arg parsing tests
|
Add some dummy arg parsing tests
|
Python
|
agpl-3.0
|
cpennington/edx-platform,mtlchun/edx,cecep-edu/edx-platform,olexiim/edx-platform,simbs/edx-platform,jbzdak/edx-platform,jbassen/edx-platform,doismellburning/edx-platform,zubair-arbi/edx-platform,proversity-org/edx-platform,chand3040/cloud_that,inares/edx-platform,Shrhawk/edx-platform,benpatterson/edx-platform,edx-solutions/edx-platform,pku9104038/edx-platform,10clouds/edx-platform,10clouds/edx-platform,dsajkl/123,nanolearning/edx-platform,ahmadio/edx-platform,jbassen/edx-platform,benpatterson/edx-platform,synergeticsedx/deployment-wipro,analyseuc3m/ANALYSE-v1,B-MOOC/edx-platform,RPI-OPENEDX/edx-platform,vismartltd/edx-platform,sameetb-cuelogic/edx-platform-test,msegado/edx-platform,teltek/edx-platform,WatanabeYasumasa/edx-platform,rhndg/openedx,rhndg/openedx,unicri/edx-platform,Livit/Livit.Learn.EdX,caesar2164/edx-platform,jazztpt/edx-platform,DNFcode/edx-platform,yokose-ks/edx-platform,jelugbo/tundex,cecep-edu/edx-platform,cyanna/edx-platform,Lektorium-LLC/edx-platform,ubc/edx-platform,ZLLab-Mooc/edx-platform,iivic/BoiseStateX,motion2015/edx-platform,ZLLab-Mooc/edx-platform,4eek/edx-platform,mahendra-r/edx-platform,torchingloom/edx-platform,jbzdak/edx-platform,olexiim/edx-platform,SravanthiSinha/edx-platform,Unow/edx-platform,chudaol/edx-platform,xuxiao19910803/edx,RPI-OPENEDX/edx-platform,eemirtekin/edx-platform,DNFcode/edx-platform,eemirtekin/edx-platform,nanolearning/edx-platform,kmoocdev2/edx-platform,kxliugang/edx-platform,itsjeyd/edx-platform,chrisndodge/edx-platform,CredoReference/edx-platform,jruiperezv/ANALYSE,wwj718/ANALYSE,miptliot/edx-platform,devs1991/test_edx_docmode,sudheerchintala/LearnEraPlatForm,IndonesiaX/edx-platform,kursitet/edx-platform,ovnicraft/edx-platform,doganov/edx-platform,defance/edx-platform,Edraak/edraak-platform,y12uc231/edx-platform,JCBarahona/edX,eduNEXT/edunext-platform,UXE/local-edx,eemirtekin/edx-platform,tiagochiavericosta/edx-platform,xingyepei/edx-platform,Shrhawk/edx-platform,louyihua/edx-platform,iivic/BoiseStateX,DNFcode/edx-platform,inares/edx-platform,Edraak/edx-platform,zadgroup/edx-platform,cpennington/edx-platform,leansoft/edx-platform,shubhdev/openedx,jolyonb/edx-platform,mahendra-r/edx-platform,mitocw/edx-platform,defance/edx-platform,xuxiao19910803/edx,TeachAtTUM/edx-platform,eduNEXT/edx-platform,alexthered/kienhoc-platform,jruiperezv/ANALYSE,dcosentino/edx-platform,B-MOOC/edx-platform,shubhdev/edxOnBaadal,shashank971/edx-platform,morenopc/edx-platform,nanolearningllc/edx-platform-cypress,sudheerchintala/LearnEraPlatForm,shubhdev/edxOnBaadal,ahmedaljazzar/edx-platform,mbareta/edx-platform-ft,chudaol/edx-platform,alu042/edx-platform,beacloudgenius/edx-platform,zerobatu/edx-platform,chauhanhardik/populo_2,shabab12/edx-platform,wwj718/ANALYSE,IONISx/edx-platform,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,xinjiguaike/edx-platform,ferabra/edx-platform,LearnEra/LearnEraPlaftform,hastexo/edx-platform,nanolearningllc/edx-platform-cypress-2,UOMx/edx-platform,appliedx/edx-platform,unicri/edx-platform,vikas1885/test1,martynovp/edx-platform,openfun/edx-platform,longmen21/edx-platform,Stanford-Online/edx-platform,Softmotions/edx-platform,dcosentino/edx-platform,motion2015/edx-platform,shabab12/edx-platform,Kalyzee/edx-platform,AkA84/edx-platform,RPI-OPENEDX/edx-platform,ubc/edx-platform,doismellburning/edx-platform,ovnicraft/edx-platform,DefyVentures/edx-platform,zhenzhai/edx-platform,jzoldak/edx-platform,vismartltd/edx-platform,jonathan-beard/edx-platform,hamzehd/edx-platform,hmcmooc/muddx-platform,dsajkl/reqiop,msegado/edx-platform,zofuthan/edx-platform,shurihell/testasia,chrisndodge/edx-platform,ahmadio/edx-platform,hmcmooc/muddx-platform,romain-li/edx-platform,UOMx/edx-platform,etzhou/edx-platform,benpatterson/edx-platform,jbassen/edx-platform,bitifirefly/edx-platform,CredoReference/edx-platform,B-MOOC/edx-platform,jelugbo/tundex,simbs/edx-platform,rismalrv/edx-platform,analyseuc3m/ANALYSE-v1,ferabra/edx-platform,ampax/edx-platform-backup,cecep-edu/edx-platform,marcore/edx-platform,nanolearningllc/edx-platform-cypress,kxliugang/edx-platform,Edraak/edx-platform,wwj718/edx-platform,shurihell/testasia,SivilTaram/edx-platform,mushtaqak/edx-platform,Stanford-Online/edx-platform,jazztpt/edx-platform,jamiefolsom/edx-platform,ubc/edx-platform,polimediaupv/edx-platform,pku9104038/edx-platform,dkarakats/edx-platform,ak2703/edx-platform,deepsrijit1105/edx-platform,pabloborrego93/edx-platform,OmarIthawi/edx-platform,cselis86/edx-platform,IndonesiaX/edx-platform,Endika/edx-platform,auferack08/edx-platform,jswope00/griffinx,arifsetiawan/edx-platform,longmen21/edx-platform,Endika/edx-platform,TeachAtTUM/edx-platform,LICEF/edx-platform,IndonesiaX/edx-platform,dkarakats/edx-platform,Stanford-Online/edx-platform,marcore/edx-platform,MakeHer/edx-platform,y12uc231/edx-platform,nanolearning/edx-platform,cyanna/edx-platform,kursitet/edx-platform,IndonesiaX/edx-platform,doganov/edx-platform,DefyVentures/edx-platform,hkawasaki/kawasaki-aio8-0,philanthropy-u/edx-platform,waheedahmed/edx-platform,mtlchun/edx,WatanabeYasumasa/edx-platform,kursitet/edx-platform,nagyistoce/edx-platform,shashank971/edx-platform,nttks/jenkins-test,hkawasaki/kawasaki-aio8-1,LearnEra/LearnEraPlaftform,antonve/s4-project-mooc,pku9104038/edx-platform,vikas1885/test1,kamalx/edx-platform,motion2015/a3,jolyonb/edx-platform,mjirayu/sit_academy,lduarte1991/edx-platform,leansoft/edx-platform,openfun/edx-platform,ZLLab-Mooc/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,antoviaque/edx-platform,Edraak/edraak-platform,devs1991/test_edx_docmode,shurihell/testasia,edry/edx-platform,martynovp/edx-platform,pepeportela/edx-platform,xinjiguaike/edx-platform,utecuy/edx-platform,Kalyzee/edx-platform,jazztpt/edx-platform,adoosii/edx-platform,Kalyzee/edx-platform,xuxiao19910803/edx-platform,zofuthan/edx-platform,beni55/edx-platform,dkarakats/edx-platform,abdoosh00/edraak,nikolas/edx-platform,pabloborrego93/edx-platform,MSOpenTech/edx-platform,openfun/edx-platform,LearnEra/LearnEraPlaftform,nanolearningllc/edx-platform-cypress-2,ahmadio/edx-platform,shubhdev/edx-platform,beni55/edx-platform,torchingloom/edx-platform,louyihua/edx-platform,jruiperezv/ANALYSE,mbareta/edx-platform-ft,jjmiranda/edx-platform,ampax/edx-platform-backup,ZLLab-Mooc/edx-platform,bdero/edx-platform,Shrhawk/edx-platform,jazkarta/edx-platform,JCBarahona/edX,jazztpt/edx-platform,arifsetiawan/edx-platform,pku9104038/edx-platform,doganov/edx-platform,chauhanhardik/populo,doismellburning/edx-platform,hkawasaki/kawasaki-aio8-0,jamesblunt/edx-platform,franosincic/edx-platform,hastexo/edx-platform,mcgachey/edx-platform,UXE/local-edx,antoviaque/edx-platform,Stanford-Online/edx-platform,MSOpenTech/edx-platform,pomegranited/edx-platform,hastexo/edx-platform,prarthitm/edxplatform,zofuthan/edx-platform,hkawasaki/kawasaki-aio8-1,hkawasaki/kawasaki-aio8-2,torchingloom/edx-platform,mitocw/edx-platform,dsajkl/123,gsehub/edx-platform,bdero/edx-platform,shubhdev/edx-platform,Semi-global/edx-platform,jazkarta/edx-platform,philanthropy-u/edx-platform,bigdatauniversity/edx-platform,shurihell/testasia,nttks/edx-platform,ahmadio/edx-platform,mcgachey/edx-platform,arifsetiawan/edx-platform,carsongee/edx-platform,naresh21/synergetics-edx-platform,motion2015/edx-platform,Lektorium-LLC/edx-platform,stvstnfrd/edx-platform,olexiim/edx-platform,ferabra/edx-platform,prarthitm/edxplatform,arbrandes/edx-platform,wwj718/ANALYSE,amir-qayyum-khan/edx-platform,motion2015/edx-platform,mushtaqak/edx-platform,edry/edx-platform,wwj718/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,Edraak/circleci-edx-platform,CourseTalk/edx-platform,rue89-tech/edx-platform,chauhanhardik/populo,simbs/edx-platform,mcgachey/edx-platform,solashirai/edx-platform,J861449197/edx-platform,chauhanhardik/populo_2,nanolearning/edx-platform,solashirai/edx-platform,cognitiveclass/edx-platform,4eek/edx-platform,beni55/edx-platform,kxliugang/edx-platform,xuxiao19910803/edx,eestay/edx-platform,olexiim/edx-platform,halvertoluke/edx-platform,jolyonb/edx-platform,Kalyzee/edx-platform,ak2703/edx-platform,gymnasium/edx-platform,auferack08/edx-platform,knehez/edx-platform,kmoocdev/edx-platform,nttks/jenkins-test,UXE/local-edx,peterm-itr/edx-platform,etzhou/edx-platform,rue89-tech/edx-platform,antonve/s4-project-mooc,MakeHer/edx-platform,auferack08/edx-platform,wwj718/ANALYSE,playm2mboy/edx-platform,kursitet/edx-platform,SivilTaram/edx-platform,bigdatauniversity/edx-platform,don-github/edx-platform,nikolas/edx-platform,nagyistoce/edx-platform,arifsetiawan/edx-platform,mtlchun/edx,UOMx/edx-platform,shabab12/edx-platform,nttks/edx-platform,EDUlib/edx-platform,jamiefolsom/edx-platform,hastexo/edx-platform,bigdatauniversity/edx-platform,alexthered/kienhoc-platform,zhenzhai/edx-platform,dsajkl/123,DNFcode/edx-platform,SravanthiSinha/edx-platform,J861449197/edx-platform,a-parhom/edx-platform,eestay/edx-platform,tanmaykm/edx-platform,nttks/edx-platform,SivilTaram/edx-platform,zadgroup/edx-platform,xinjiguaike/edx-platform,zubair-arbi/edx-platform,angelapper/edx-platform,dsajkl/123,tanmaykm/edx-platform,dsajkl/123,SravanthiSinha/edx-platform,mbareta/edx-platform-ft,nagyistoce/edx-platform,shubhdev/edx-platform,ampax/edx-platform,knehez/edx-platform,ESOedX/edx-platform,UXE/local-edx,jazkarta/edx-platform-for-isc,10clouds/edx-platform,Unow/edx-platform,morenopc/edx-platform,jazkarta/edx-platform,jamesblunt/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,motion2015/a3,dcosentino/edx-platform,eestay/edx-platform,nttks/edx-platform,amir-qayyum-khan/edx-platform,sameetb-cuelogic/edx-platform-test,beni55/edx-platform,xuxiao19910803/edx-platform,vismartltd/edx-platform,gsehub/edx-platform,synergeticsedx/deployment-wipro,shabab12/edx-platform,devs1991/test_edx_docmode,chand3040/cloud_that,rismalrv/edx-platform,JCBarahona/edX,chand3040/cloud_that,atsolakid/edx-platform,xuxiao19910803/edx-platform,y12uc231/edx-platform,deepsrijit1105/edx-platform,chauhanhardik/populo_2,miptliot/edx-platform,openfun/edx-platform,martynovp/edx-platform,shashank971/edx-platform,rismalrv/edx-platform,shubhdev/edx-platform,caesar2164/edx-platform,procangroup/edx-platform,kxliugang/edx-platform,Livit/Livit.Learn.EdX,CourseTalk/edx-platform,Edraak/edraak-platform,tiagochiavericosta/edx-platform,jswope00/GAI,edx/edx-platform,wwj718/ANALYSE,xuxiao19910803/edx,mjirayu/sit_academy,synergeticsedx/deployment-wipro,openfun/edx-platform,BehavioralInsightsTeam/edx-platform,rhndg/openedx,abdoosh00/edraak,doganov/edx-platform,mitocw/edx-platform,kamalx/edx-platform,edx/edx-platform,atsolakid/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,fintech-circle/edx-platform,cecep-edu/edx-platform,Shrhawk/edx-platform,rue89-tech/edx-platform,JCBarahona/edX,AkA84/edx-platform,naresh21/synergetics-edx-platform,jbzdak/edx-platform,zadgroup/edx-platform,devs1991/test_edx_docmode,etzhou/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edraak-platform,atsolakid/edx-platform,ferabra/edx-platform,mbareta/edx-platform-ft,procangroup/edx-platform,TeachAtTUM/edx-platform,vikas1885/test1,eduNEXT/edunext-platform,longmen21/edx-platform,pomegranited/edx-platform,valtech-mooc/edx-platform,ampax/edx-platform-backup,solashirai/edx-platform,SivilTaram/edx-platform,WatanabeYasumasa/edx-platform,ovnicraft/edx-platform,LICEF/edx-platform,solashirai/edx-platform,DNFcode/edx-platform,mushtaqak/edx-platform,lduarte1991/edx-platform,shubhdev/edxOnBaadal,mahendra-r/edx-platform,MSOpenTech/edx-platform,tiagochiavericosta/edx-platform,zerobatu/edx-platform,inares/edx-platform,mtlchun/edx,cselis86/edx-platform,raccoongang/edx-platform,sameetb-cuelogic/edx-platform-test,antoviaque/edx-platform,jjmiranda/edx-platform,arifsetiawan/edx-platform,JioEducation/edx-platform,hkawasaki/kawasaki-aio8-0,peterm-itr/edx-platform,pomegranited/edx-platform,nanolearning/edx-platform,nikolas/edx-platform,amir-qayyum-khan/edx-platform,teltek/edx-platform,jbzdak/edx-platform,appsembler/edx-platform,eemirtekin/edx-platform,yokose-ks/edx-platform,antonve/s4-project-mooc,chudaol/edx-platform,eemirtekin/edx-platform,andyzsf/edx,deepsrijit1105/edx-platform,kmoocdev/edx-platform,chauhanhardik/populo_2,alexthered/kienhoc-platform,IONISx/edx-platform,valtech-mooc/edx-platform,itsjeyd/edx-platform,longmen21/edx-platform,kmoocdev2/edx-platform,mjirayu/sit_academy,mtlchun/edx,ampax/edx-platform-backup,jamesblunt/edx-platform,nttks/jenkins-test,LearnEra/LearnEraPlaftform,jonathan-beard/edx-platform,analyseuc3m/ANALYSE-v1,LICEF/edx-platform,IONISx/edx-platform,Lektorium-LLC/edx-platform,hkawasaki/kawasaki-aio8-1,vasyarv/edx-platform,waheedahmed/edx-platform,jruiperezv/ANALYSE,BehavioralInsightsTeam/edx-platform,nagyistoce/edx-platform,ZLLab-Mooc/edx-platform,pepeportela/edx-platform,chauhanhardik/populo,fintech-circle/edx-platform,shurihell/testasia,kmoocdev2/edx-platform,knehez/edx-platform,teltek/edx-platform,halvertoluke/edx-platform,procangroup/edx-platform,cyanna/edx-platform,hkawasaki/kawasaki-aio8-1,LICEF/edx-platform,RPI-OPENEDX/edx-platform,B-MOOC/edx-platform,jamiefolsom/edx-platform,jamiefolsom/edx-platform,chrisndodge/edx-platform,don-github/edx-platform,BehavioralInsightsTeam/edx-platform,jamiefolsom/edx-platform,rue89-tech/edx-platform,beacloudgenius/edx-platform,polimediaupv/edx-platform,jzoldak/edx-platform,playm2mboy/edx-platform,sudheerchintala/LearnEraPlatForm,morenopc/edx-platform,ahmadiga/min_edx,MakeHer/edx-platform,mitocw/edx-platform,franosincic/edx-platform,ahmedaljazzar/edx-platform,motion2015/a3,fly19890211/edx-platform,romain-li/edx-platform,doganov/edx-platform,ESOedX/edx-platform,Edraak/circleci-edx-platform,shubhdev/openedx,kamalx/edx-platform,chudaol/edx-platform,jswope00/griffinx,DefyVentures/edx-platform,bigdatauniversity/edx-platform,CredoReference/edx-platform,rue89-tech/edx-platform,zerobatu/edx-platform,benpatterson/edx-platform,Semi-global/edx-platform,dsajkl/reqiop,pabloborrego93/edx-platform,jzoldak/edx-platform,atsolakid/edx-platform,Livit/Livit.Learn.EdX,synergeticsedx/deployment-wipro,ahmadiga/min_edx,motion2015/a3,Ayub-Khan/edx-platform,wwj718/edx-platform,4eek/edx-platform,alu042/edx-platform,rhndg/openedx,pepeportela/edx-platform,unicri/edx-platform,OmarIthawi/edx-platform,Softmotions/edx-platform,knehez/edx-platform,rhndg/openedx,DefyVentures/edx-platform,iivic/BoiseStateX,jazkarta/edx-platform-for-isc,dcosentino/edx-platform,dkarakats/edx-platform,IndonesiaX/edx-platform,MSOpenTech/edx-platform,MSOpenTech/edx-platform,motion2015/a3,bitifirefly/edx-platform,deepsrijit1105/edx-platform,angelapper/edx-platform,stvstnfrd/edx-platform,chrisndodge/edx-platform,jjmiranda/edx-platform,adoosii/edx-platform,naresh21/synergetics-edx-platform,knehez/edx-platform,andyzsf/edx,antoviaque/edx-platform,Kalyzee/edx-platform,CourseTalk/edx-platform,romain-li/edx-platform,mjirayu/sit_academy,Unow/edx-platform,Softmotions/edx-platform,RPI-OPENEDX/edx-platform,alexthered/kienhoc-platform,sameetb-cuelogic/edx-platform-test,adoosii/edx-platform,doismellburning/edx-platform,hmcmooc/muddx-platform,jelugbo/tundex,unicri/edx-platform,Semi-global/edx-platform,hamzehd/edx-platform,leansoft/edx-platform,nttks/jenkins-test,ubc/edx-platform,Ayub-Khan/edx-platform,adoosii/edx-platform,msegado/edx-platform,jswope00/griffinx,edx/edx-platform,mahendra-r/edx-platform,Semi-global/edx-platform,fintech-circle/edx-platform,adoosii/edx-platform,xingyepei/edx-platform,Shrhawk/edx-platform,pepeportela/edx-platform,4eek/edx-platform,Edraak/circleci-edx-platform,stvstnfrd/edx-platform,zadgroup/edx-platform,vismartltd/edx-platform,lduarte1991/edx-platform,tanmaykm/edx-platform,itsjeyd/edx-platform,Edraak/edx-platform,dkarakats/edx-platform,jbassen/edx-platform,ahmedaljazzar/edx-platform,waheedahmed/edx-platform,louyihua/edx-platform,edry/edx-platform,dsajkl/reqiop,andyzsf/edx,nanolearningllc/edx-platform-cypress,msegado/edx-platform,defance/edx-platform,marcore/edx-platform,nttks/jenkins-test,naresh21/synergetics-edx-platform,jbzdak/edx-platform,utecuy/edx-platform,chudaol/edx-platform,solashirai/edx-platform,waheedahmed/edx-platform,jswope00/GAI,don-github/edx-platform,don-github/edx-platform,utecuy/edx-platform,cognitiveclass/edx-platform,gymnasium/edx-platform,zhenzhai/edx-platform,bitifirefly/edx-platform,cselis86/edx-platform,jjmiranda/edx-platform,jswope00/griffinx,nanolearningllc/edx-platform-cypress-2,fintech-circle/edx-platform,nttks/edx-platform,xinjiguaike/edx-platform,hkawasaki/kawasaki-aio8-2,fly19890211/edx-platform,polimediaupv/edx-platform,cecep-edu/edx-platform,shashank971/edx-platform,shubhdev/edx-platform,edx-solutions/edx-platform,iivic/BoiseStateX,jazkarta/edx-platform,polimediaupv/edx-platform,rismalrv/edx-platform,kmoocdev/edx-platform,eestay/edx-platform,inares/edx-platform,xinjiguaike/edx-platform,bitifirefly/edx-platform,zadgroup/edx-platform,jruiperezv/ANALYSE,zubair-arbi/edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-0,doismellburning/edx-platform,philanthropy-u/edx-platform,beacloudgenius/edx-platform,Lektorium-LLC/edx-platform,devs1991/test_edx_docmode,mcgachey/edx-platform,hkawasaki/kawasaki-aio8-2,arbrandes/edx-platform,Ayub-Khan/edx-platform,nikolas/edx-platform,rismalrv/edx-platform,y12uc231/edx-platform,cpennington/edx-platform,jolyonb/edx-platform,shubhdev/openedx,bdero/edx-platform,simbs/edx-platform,leansoft/edx-platform,gymnasium/edx-platform,shubhdev/edxOnBaadal,devs1991/test_edx_docmode,raccoongang/edx-platform,chauhanhardik/populo,msegado/edx-platform,waheedahmed/edx-platform,nagyistoce/edx-platform,TeachAtTUM/edx-platform,alu042/edx-platform,Softmotions/edx-platform,franosincic/edx-platform,carsongee/edx-platform,cpennington/edx-platform,zhenzhai/edx-platform,wwj718/edx-platform,10clouds/edx-platform,prarthitm/edxplatform,eduNEXT/edunext-platform,appliedx/edx-platform,valtech-mooc/edx-platform,martynovp/edx-platform,gymnasium/edx-platform,zofuthan/edx-platform,appsembler/edx-platform,UOMx/edx-platform,jonathan-beard/edx-platform,appsembler/edx-platform,halvertoluke/edx-platform,atsolakid/edx-platform,ahmadiga/min_edx,ovnicraft/edx-platform,vasyarv/edx-platform,louyihua/edx-platform,franosincic/edx-platform,EDUlib/edx-platform,nanolearningllc/edx-platform-cypress,B-MOOC/edx-platform,nanolearningllc/edx-platform-cypress-2,IONISx/edx-platform,EDUlib/edx-platform,chand3040/cloud_that,ak2703/edx-platform,ahmadiga/min_edx,tiagochiavericosta/edx-platform,zofuthan/edx-platform,defance/edx-platform,SivilTaram/edx-platform,JCBarahona/edX,BehavioralInsightsTeam/edx-platform,jazkarta/edx-platform-for-isc,romain-li/edx-platform,Ayub-Khan/edx-platform,ferabra/edx-platform,chauhanhardik/populo_2,vikas1885/test1,zhenzhai/edx-platform,beacloudgenius/edx-platform,mcgachey/edx-platform,AkA84/edx-platform,bigdatauniversity/edx-platform,amir-qayyum-khan/edx-platform,teltek/edx-platform,ovnicraft/edx-platform,yokose-ks/edx-platform,franosincic/edx-platform,utecuy/edx-platform,morenopc/edx-platform,xingyepei/edx-platform,procangroup/edx-platform,jbassen/edx-platform,appliedx/edx-platform,alu042/edx-platform,mushtaqak/edx-platform,kmoocdev/edx-platform,nanolearningllc/edx-platform-cypress-2,jamesblunt/edx-platform,ahmadio/edx-platform,torchingloom/edx-platform,polimediaupv/edx-platform,ak2703/edx-platform,zubair-arbi/edx-platform,tanmaykm/edx-platform,antonve/s4-project-mooc,vasyarv/edx-platform,yokose-ks/edx-platform,jzoldak/edx-platform,nikolas/edx-platform,fly19890211/edx-platform,cselis86/edx-platform,itsjeyd/edx-platform,andyzsf/edx,jazztpt/edx-platform,angelapper/edx-platform,carsongee/edx-platform,bdero/edx-platform,jswope00/GAI,kxliugang/edx-platform,Livit/Livit.Learn.EdX,JioEducation/edx-platform,yokose-ks/edx-platform,arbrandes/edx-platform,y12uc231/edx-platform,raccoongang/edx-platform,torchingloom/edx-platform,miptliot/edx-platform,kmoocdev2/edx-platform,iivic/BoiseStateX,ampax/edx-platform,MakeHer/edx-platform,sudheerchintala/LearnEraPlatForm,mahendra-r/edx-platform,chauhanhardik/populo,ESOedX/edx-platform,jonathan-beard/edx-platform,marcore/edx-platform,jamesblunt/edx-platform,ubc/edx-platform,antonve/s4-project-mooc,mushtaqak/edx-platform,analyseuc3m/ANALYSE-v1,philanthropy-u/edx-platform,appliedx/edx-platform,WatanabeYasumasa/edx-platform,jazkarta/edx-platform-for-isc,alexthered/kienhoc-platform,benpatterson/edx-platform,Edraak/circleci-edx-platform,appliedx/edx-platform,prarthitm/edxplatform,DefyVentures/edx-platform,shashank971/edx-platform,inares/edx-platform,pomegranited/edx-platform,edx-solutions/edx-platform,pomegranited/edx-platform,AkA84/edx-platform,carsongee/edx-platform,appsembler/edx-platform,jelugbo/tundex,xuxiao19910803/edx-platform,CourseTalk/edx-platform,motion2015/edx-platform,kursitet/edx-platform,proversity-org/edx-platform,IONISx/edx-platform,xingyepei/edx-platform,lduarte1991/edx-platform,eestay/edx-platform,martynovp/edx-platform,abdoosh00/edraak,Softmotions/edx-platform,MakeHer/edx-platform,etzhou/edx-platform,edry/edx-platform,kamalx/edx-platform,LICEF/edx-platform,peterm-itr/edx-platform,proversity-org/edx-platform,ampax/edx-platform,hamzehd/edx-platform,devs1991/test_edx_docmode,kamalx/edx-platform,valtech-mooc/edx-platform,Edraak/edx-platform,miptliot/edx-platform,ampax/edx-platform,cyanna/edx-platform,vikas1885/test1,SravanthiSinha/edx-platform,J861449197/edx-platform,wwj718/edx-platform,OmarIthawi/edx-platform,JioEducation/edx-platform,hkawasaki/kawasaki-aio8-2,arbrandes/edx-platform,jazkarta/edx-platform,utecuy/edx-platform,vasyarv/edx-platform,4eek/edx-platform,cselis86/edx-platform,Edraak/circleci-edx-platform,eduNEXT/edx-platform,gsehub/edx-platform,halvertoluke/edx-platform,beacloudgenius/edx-platform,etzhou/edx-platform,ahmadiga/min_edx,vasyarv/edx-platform,stvstnfrd/edx-platform,nanolearningllc/edx-platform-cypress,kmoocdev/edx-platform,zerobatu/edx-platform,xingyepei/edx-platform,J861449197/edx-platform,raccoongang/edx-platform,unicri/edx-platform,Endika/edx-platform,beni55/edx-platform,chand3040/cloud_that,peterm-itr/edx-platform,xuxiao19910803/edx,halvertoluke/edx-platform,shubhdev/openedx,leansoft/edx-platform,cyanna/edx-platform,jswope00/griffinx,valtech-mooc/edx-platform,cognitiveclass/edx-platform,Semi-global/edx-platform,hamzehd/edx-platform,simbs/edx-platform,morenopc/edx-platform,JioEducation/edx-platform,playm2mboy/edx-platform,don-github/edx-platform,jswope00/GAI,EDUlib/edx-platform,J861449197/edx-platform,edx/edx-platform,cognitiveclass/edx-platform,proversity-org/edx-platform,zerobatu/edx-platform,edry/edx-platform,jelugbo/tundex,abdoosh00/edraak,dsajkl/reqiop,shubhdev/openedx,OmarIthawi/edx-platform,eduNEXT/edunext-platform,jonathan-beard/edx-platform,AkA84/edx-platform,Ayub-Khan/edx-platform,tiagochiavericosta/edx-platform,fly19890211/edx-platform,zubair-arbi/edx-platform,caesar2164/edx-platform,cognitiveclass/edx-platform,a-parhom/edx-platform,shubhdev/edxOnBaadal,Endika/edx-platform,devs1991/test_edx_docmode,playm2mboy/edx-platform,pabloborrego93/edx-platform,ampax/edx-platform-backup,hmcmooc/muddx-platform,fly19890211/edx-platform,Unow/edx-platform,bitifirefly/edx-platform,auferack08/edx-platform,vismartltd/edx-platform,jazkarta/edx-platform-for-isc,edx-solutions/edx-platform,a-parhom/edx-platform,xuxiao19910803/edx-platform,mjirayu/sit_academy,a-parhom/edx-platform,CredoReference/edx-platform,olexiim/edx-platform,ak2703/edx-platform,dcosentino/edx-platform,Edraak/edx-platform,playm2mboy/edx-platform
|
Add some dummy arg parsing tests
|
"""
Unittests for importing a course via management command
"""
import unittest
from django.core.management import CommandError
from contentstore.management.commands.migrate_to_split import Command
class TestArgParsing(unittest.TestCase):
def setUp(self):
self.command = Command()
def test_no_args(self):
errstring = "migrate_to_split requires at least two arguments"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle()
def test_invalid_location(self):
errstring = "Invalid location string"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("foo", "bar")
def test_nonexistant_user_id(self):
errstring = "No user exists with ID 99"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "99")
def test_nonexistant_user_email(self):
errstring = "No user exists with email fake@example.com"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "fake@example.com")
|
<commit_before><commit_msg>Add some dummy arg parsing tests<commit_after>
|
"""
Unittests for importing a course via management command
"""
import unittest
from django.core.management import CommandError
from contentstore.management.commands.migrate_to_split import Command
class TestArgParsing(unittest.TestCase):
def setUp(self):
self.command = Command()
def test_no_args(self):
errstring = "migrate_to_split requires at least two arguments"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle()
def test_invalid_location(self):
errstring = "Invalid location string"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("foo", "bar")
def test_nonexistant_user_id(self):
errstring = "No user exists with ID 99"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "99")
def test_nonexistant_user_email(self):
errstring = "No user exists with email fake@example.com"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "fake@example.com")
|
Add some dummy arg parsing tests"""
Unittests for importing a course via management command
"""
import unittest
from django.core.management import CommandError
from contentstore.management.commands.migrate_to_split import Command
class TestArgParsing(unittest.TestCase):
def setUp(self):
self.command = Command()
def test_no_args(self):
errstring = "migrate_to_split requires at least two arguments"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle()
def test_invalid_location(self):
errstring = "Invalid location string"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("foo", "bar")
def test_nonexistant_user_id(self):
errstring = "No user exists with ID 99"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "99")
def test_nonexistant_user_email(self):
errstring = "No user exists with email fake@example.com"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "fake@example.com")
|
<commit_before><commit_msg>Add some dummy arg parsing tests<commit_after>"""
Unittests for importing a course via management command
"""
import unittest
from django.core.management import CommandError
from contentstore.management.commands.migrate_to_split import Command
class TestArgParsing(unittest.TestCase):
def setUp(self):
self.command = Command()
def test_no_args(self):
errstring = "migrate_to_split requires at least two arguments"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle()
def test_invalid_location(self):
errstring = "Invalid location string"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("foo", "bar")
def test_nonexistant_user_id(self):
errstring = "No user exists with ID 99"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "99")
def test_nonexistant_user_email(self):
errstring = "No user exists with email fake@example.com"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("i4x://org/course/category/name", "fake@example.com")
|
|
6c6442bf232c36314c5d195cd2674a98397b64ed
|
src/mailme/migrations/0006_auto_20180221_0758.py
|
src/mailme/migrations/0006_auto_20180221_0758.py
|
# Generated by Django 2.0 on 2018-02-21 07:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mailme', '0005_auto_20160724_1415'),
]
operations = [
migrations.RemoveField(
model_name='thread',
name='mailbox',
),
migrations.RemoveField(
model_name='message',
name='references',
),
migrations.RemoveField(
model_name='message',
name='thread',
),
migrations.AlterField(
model_name='mailbox',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='mailboxfolder',
name='mailbox',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='folders', to='mailme.Mailbox'),
),
migrations.AlterField(
model_name='message',
name='folder',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='messages', to='mailme.MailboxFolder'),
),
migrations.DeleteModel(
name='Thread',
),
]
|
Add missing migration for recent model refactor
|
Add missing migration for recent model refactor
|
Python
|
bsd-3-clause
|
mailme/mailme,mailme/mailme
|
Add missing migration for recent model refactor
|
# Generated by Django 2.0 on 2018-02-21 07:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mailme', '0005_auto_20160724_1415'),
]
operations = [
migrations.RemoveField(
model_name='thread',
name='mailbox',
),
migrations.RemoveField(
model_name='message',
name='references',
),
migrations.RemoveField(
model_name='message',
name='thread',
),
migrations.AlterField(
model_name='mailbox',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='mailboxfolder',
name='mailbox',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='folders', to='mailme.Mailbox'),
),
migrations.AlterField(
model_name='message',
name='folder',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='messages', to='mailme.MailboxFolder'),
),
migrations.DeleteModel(
name='Thread',
),
]
|
<commit_before><commit_msg>Add missing migration for recent model refactor<commit_after>
|
# Generated by Django 2.0 on 2018-02-21 07:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mailme', '0005_auto_20160724_1415'),
]
operations = [
migrations.RemoveField(
model_name='thread',
name='mailbox',
),
migrations.RemoveField(
model_name='message',
name='references',
),
migrations.RemoveField(
model_name='message',
name='thread',
),
migrations.AlterField(
model_name='mailbox',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='mailboxfolder',
name='mailbox',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='folders', to='mailme.Mailbox'),
),
migrations.AlterField(
model_name='message',
name='folder',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='messages', to='mailme.MailboxFolder'),
),
migrations.DeleteModel(
name='Thread',
),
]
|
Add missing migration for recent model refactor# Generated by Django 2.0 on 2018-02-21 07:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mailme', '0005_auto_20160724_1415'),
]
operations = [
migrations.RemoveField(
model_name='thread',
name='mailbox',
),
migrations.RemoveField(
model_name='message',
name='references',
),
migrations.RemoveField(
model_name='message',
name='thread',
),
migrations.AlterField(
model_name='mailbox',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='mailboxfolder',
name='mailbox',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='folders', to='mailme.Mailbox'),
),
migrations.AlterField(
model_name='message',
name='folder',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='messages', to='mailme.MailboxFolder'),
),
migrations.DeleteModel(
name='Thread',
),
]
|
<commit_before><commit_msg>Add missing migration for recent model refactor<commit_after># Generated by Django 2.0 on 2018-02-21 07:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mailme', '0005_auto_20160724_1415'),
]
operations = [
migrations.RemoveField(
model_name='thread',
name='mailbox',
),
migrations.RemoveField(
model_name='message',
name='references',
),
migrations.RemoveField(
model_name='message',
name='thread',
),
migrations.AlterField(
model_name='mailbox',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='mailboxfolder',
name='mailbox',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='folders', to='mailme.Mailbox'),
),
migrations.AlterField(
model_name='message',
name='folder',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='messages', to='mailme.MailboxFolder'),
),
migrations.DeleteModel(
name='Thread',
),
]
|
|
f12bf6096e607e090da7c4e80be2bed3afb5ff5a
|
crmapp/contacts/urls.py
|
crmapp/contacts/urls.py
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
Create the Contacts App - Part II > Edit Contact - Create URL
|
Create the Contacts App - Part II > Edit Contact - Create URL
|
Python
|
mit
|
tabdon/crmeasyapp,tabdon/crmeasyapp,deenaariff/Django
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
Create the Contacts App - Part II > Edit Contact - Create URL
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
<commit_before>from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
<commit_msg>Create the Contacts App - Part II > Edit Contact - Create URL<commit_after>
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
Create the Contacts App - Part II > Edit Contact - Create URLfrom django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
<commit_before>from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
)
<commit_msg>Create the Contacts App - Part II > Edit Contact - Create URL<commit_after>from django.conf.urls import patterns, url
contact_urls = patterns('',
url(r'^$', 'crmapp.contacts.views.contact_detail', name="contact_detail"),
url(r'^edit/$',
'crmapp.contacts.views.contact_cru', name='contact_update'
),
)
|
d6b80a6510cbedfa6295d90bf72901aea3e8c5ba
|
toPathwayReactome.py
|
toPathwayReactome.py
|
import requests
def httpRequestReactome(code):
try:
r = requests.get('http://www.reactome.org/ContentService/search/query?query=' + code +'&cluster=true')
results = r.json()
resultId = results['results'][0]['entries'][0]['stId']
r = requests.get('http://www.reactome.org/ContentService/data/pathways/low/entity/' + resultId)
print(r.json())
except:
print(r.json()['messages'][0])
return r.text
def main():
test = httpRequestReactome('0007264')
main()
|
Add an example of pathway extraction from Reaction.
|
Add an example of pathway extraction from Reaction.
|
Python
|
agpl-3.0
|
ArnaudBelcour/Workflow_GeneList_Analysis,ArnaudBelcour/Workflow_GeneList_Analysis
|
Add an example of pathway extraction from Reaction.
|
import requests
def httpRequestReactome(code):
try:
r = requests.get('http://www.reactome.org/ContentService/search/query?query=' + code +'&cluster=true')
results = r.json()
resultId = results['results'][0]['entries'][0]['stId']
r = requests.get('http://www.reactome.org/ContentService/data/pathways/low/entity/' + resultId)
print(r.json())
except:
print(r.json()['messages'][0])
return r.text
def main():
test = httpRequestReactome('0007264')
main()
|
<commit_before><commit_msg>Add an example of pathway extraction from Reaction.<commit_after>
|
import requests
def httpRequestReactome(code):
try:
r = requests.get('http://www.reactome.org/ContentService/search/query?query=' + code +'&cluster=true')
results = r.json()
resultId = results['results'][0]['entries'][0]['stId']
r = requests.get('http://www.reactome.org/ContentService/data/pathways/low/entity/' + resultId)
print(r.json())
except:
print(r.json()['messages'][0])
return r.text
def main():
test = httpRequestReactome('0007264')
main()
|
Add an example of pathway extraction from Reaction.import requests
def httpRequestReactome(code):
try:
r = requests.get('http://www.reactome.org/ContentService/search/query?query=' + code +'&cluster=true')
results = r.json()
resultId = results['results'][0]['entries'][0]['stId']
r = requests.get('http://www.reactome.org/ContentService/data/pathways/low/entity/' + resultId)
print(r.json())
except:
print(r.json()['messages'][0])
return r.text
def main():
test = httpRequestReactome('0007264')
main()
|
<commit_before><commit_msg>Add an example of pathway extraction from Reaction.<commit_after>import requests
def httpRequestReactome(code):
try:
r = requests.get('http://www.reactome.org/ContentService/search/query?query=' + code +'&cluster=true')
results = r.json()
resultId = results['results'][0]['entries'][0]['stId']
r = requests.get('http://www.reactome.org/ContentService/data/pathways/low/entity/' + resultId)
print(r.json())
except:
print(r.json()['messages'][0])
return r.text
def main():
test = httpRequestReactome('0007264')
main()
|
|
4d072cb1130b59b292437a9b1acc468996813cff
|
migrations/versions/0026_rename_notify_service.py
|
migrations/versions/0026_rename_notify_service.py
|
"""empty message
Revision ID: 0026_rename_notify_service
Revises: 0025_notify_service_data
Create Date: 2016-06-07 09:51:07.343334
"""
# revision identifiers, used by Alembic.
revision = '0026_rename_notify_service'
down_revision = '0025_notify_service_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.get_bind()
op.execute("update services set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
op.execute("update services_history set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
|
Rename notify service to GOV.UK Notify
|
Rename notify service to GOV.UK Notify
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Rename notify service to GOV.UK Notify
|
"""empty message
Revision ID: 0026_rename_notify_service
Revises: 0025_notify_service_data
Create Date: 2016-06-07 09:51:07.343334
"""
# revision identifiers, used by Alembic.
revision = '0026_rename_notify_service'
down_revision = '0025_notify_service_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.get_bind()
op.execute("update services set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
op.execute("update services_history set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
|
<commit_before><commit_msg>Rename notify service to GOV.UK Notify<commit_after>
|
"""empty message
Revision ID: 0026_rename_notify_service
Revises: 0025_notify_service_data
Create Date: 2016-06-07 09:51:07.343334
"""
# revision identifiers, used by Alembic.
revision = '0026_rename_notify_service'
down_revision = '0025_notify_service_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.get_bind()
op.execute("update services set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
op.execute("update services_history set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
|
Rename notify service to GOV.UK Notify"""empty message
Revision ID: 0026_rename_notify_service
Revises: 0025_notify_service_data
Create Date: 2016-06-07 09:51:07.343334
"""
# revision identifiers, used by Alembic.
revision = '0026_rename_notify_service'
down_revision = '0025_notify_service_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.get_bind()
op.execute("update services set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
op.execute("update services_history set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
|
<commit_before><commit_msg>Rename notify service to GOV.UK Notify<commit_after>"""empty message
Revision ID: 0026_rename_notify_service
Revises: 0025_notify_service_data
Create Date: 2016-06-07 09:51:07.343334
"""
# revision identifiers, used by Alembic.
revision = '0026_rename_notify_service'
down_revision = '0025_notify_service_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.get_bind()
op.execute("update services set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
op.execute("update services_history set name = 'GOV.UK Notify' where id = 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553'")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
|
|
e9b16e15c86485f163c14d43eda25396d5a11ba3
|
samples/python/topology/spl/files.py
|
samples/python/topology/spl/files.py
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
import sys
import string
from streamsx.topology.topology import Topology
from streamsx.topology import spl_ops
from streamsx.topology.spl_ops import *
from streamsx.topology.spl_types import *
from streamsx.topology.schema import *
import streamsx.topology.context
def main():
"""
This demonstrates the invocation of SPL operators from
the SPL standard toolkit.
Example:
python3 files.py
Output:
Capitalized words from the contents of files in /tmp/work
"""
# Create the container for the topology that will hold the streams of tuples.
topo = Topology("Files")
# Invoke an SPL DirectoryScan operator as a source.
# This one scans /tmp/work for files.
# Note the full kind of the operator is required.
files = source("spl.adapter::DirectoryScan", topology=topo,
schema=CommonSchema.String, params = {'directory': '/tmp/work'})
# Follow it with a FileSource operator
# If no schema is provided then the input schema is used.
lines = map("spl.adapter::FileSource", files)
# Feed the lines into a Python function
lines = lines.map(string.capwords)
# Sink lines by printing each of its tuples to standard output
lines.print()
# Now execute the topology by submitting to a standalone context.
streamsx.topology.context.submit("STANDALONE", topo.graph)
if __name__ == '__main__':
main()
|
Add sample app calling SPL functions
|
Add sample app calling SPL functions
|
Python
|
apache-2.0
|
IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,ddebrunner/streamsx.topology,ibmkendrick/streamsx.topology,wmarshall484/streamsx.topology,ddebrunner/streamsx.topology,IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,ddebrunner/streamsx.topology,wmarshall484/streamsx.topology,ibmkendrick/streamsx.topology,IBMStreams/streamsx.topology,ddebrunner/streamsx.topology,wmarshall484/streamsx.topology,ddebrunner/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,wmarshall484/streamsx.topology,IBMStreams/streamsx.topology,wmarshall484/streamsx.topology,wmarshall484/streamsx.topology,ddebrunner/streamsx.topology,IBMStreams/streamsx.topology,ibmkendrick/streamsx.topology,ibmkendrick/streamsx.topology,ddebrunner/streamsx.topology
|
Add sample app calling SPL functions
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
import sys
import string
from streamsx.topology.topology import Topology
from streamsx.topology import spl_ops
from streamsx.topology.spl_ops import *
from streamsx.topology.spl_types import *
from streamsx.topology.schema import *
import streamsx.topology.context
def main():
"""
This demonstrates the invocation of SPL operators from
the SPL standard toolkit.
Example:
python3 files.py
Output:
Capitalized words from the contents of files in /tmp/work
"""
# Create the container for the topology that will hold the streams of tuples.
topo = Topology("Files")
# Invoke an SPL DirectoryScan operator as a source.
# This one scans /tmp/work for files.
# Note the full kind of the operator is required.
files = source("spl.adapter::DirectoryScan", topology=topo,
schema=CommonSchema.String, params = {'directory': '/tmp/work'})
# Follow it with a FileSource operator
# If no schema is provided then the input schema is used.
lines = map("spl.adapter::FileSource", files)
# Feed the lines into a Python function
lines = lines.map(string.capwords)
# Sink lines by printing each of its tuples to standard output
lines.print()
# Now execute the topology by submitting to a standalone context.
streamsx.topology.context.submit("STANDALONE", topo.graph)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add sample app calling SPL functions<commit_after>
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
import sys
import string
from streamsx.topology.topology import Topology
from streamsx.topology import spl_ops
from streamsx.topology.spl_ops import *
from streamsx.topology.spl_types import *
from streamsx.topology.schema import *
import streamsx.topology.context
def main():
"""
This demonstrates the invocation of SPL operators from
the SPL standard toolkit.
Example:
python3 files.py
Output:
Capitalized words from the contents of files in /tmp/work
"""
# Create the container for the topology that will hold the streams of tuples.
topo = Topology("Files")
# Invoke an SPL DirectoryScan operator as a source.
# This one scans /tmp/work for files.
# Note the full kind of the operator is required.
files = source("spl.adapter::DirectoryScan", topology=topo,
schema=CommonSchema.String, params = {'directory': '/tmp/work'})
# Follow it with a FileSource operator
# If no schema is provided then the input schema is used.
lines = map("spl.adapter::FileSource", files)
# Feed the lines into a Python function
lines = lines.map(string.capwords)
# Sink lines by printing each of its tuples to standard output
lines.print()
# Now execute the topology by submitting to a standalone context.
streamsx.topology.context.submit("STANDALONE", topo.graph)
if __name__ == '__main__':
main()
|
Add sample app calling SPL functions# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
import sys
import string
from streamsx.topology.topology import Topology
from streamsx.topology import spl_ops
from streamsx.topology.spl_ops import *
from streamsx.topology.spl_types import *
from streamsx.topology.schema import *
import streamsx.topology.context
def main():
"""
This demonstrates the invocation of SPL operators from
the SPL standard toolkit.
Example:
python3 files.py
Output:
Capitalized words from the contents of files in /tmp/work
"""
# Create the container for the topology that will hold the streams of tuples.
topo = Topology("Files")
# Invoke an SPL DirectoryScan operator as a source.
# This one scans /tmp/work for files.
# Note the full kind of the operator is required.
files = source("spl.adapter::DirectoryScan", topology=topo,
schema=CommonSchema.String, params = {'directory': '/tmp/work'})
# Follow it with a FileSource operator
# If no schema is provided then the input schema is used.
lines = map("spl.adapter::FileSource", files)
# Feed the lines into a Python function
lines = lines.map(string.capwords)
# Sink lines by printing each of its tuples to standard output
lines.print()
# Now execute the topology by submitting to a standalone context.
streamsx.topology.context.submit("STANDALONE", topo.graph)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add sample app calling SPL functions<commit_after># Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
import sys
import string
from streamsx.topology.topology import Topology
from streamsx.topology import spl_ops
from streamsx.topology.spl_ops import *
from streamsx.topology.spl_types import *
from streamsx.topology.schema import *
import streamsx.topology.context
def main():
"""
This demonstrates the invocation of SPL operators from
the SPL standard toolkit.
Example:
python3 files.py
Output:
Capitalized words from the contents of files in /tmp/work
"""
# Create the container for the topology that will hold the streams of tuples.
topo = Topology("Files")
# Invoke an SPL DirectoryScan operator as a source.
# This one scans /tmp/work for files.
# Note the full kind of the operator is required.
files = source("spl.adapter::DirectoryScan", topology=topo,
schema=CommonSchema.String, params = {'directory': '/tmp/work'})
# Follow it with a FileSource operator
# If no schema is provided then the input schema is used.
lines = map("spl.adapter::FileSource", files)
# Feed the lines into a Python function
lines = lines.map(string.capwords)
# Sink lines by printing each of its tuples to standard output
lines.print()
# Now execute the topology by submitting to a standalone context.
streamsx.topology.context.submit("STANDALONE", topo.graph)
if __name__ == '__main__':
main()
|
|
843b59e022011c4b33c73763015e88eecb17e662
|
destroyer/services/facebook.py
|
destroyer/services/facebook.py
|
"""facebook.py - Module for Facebook service functionality"""
import time
import click
class FacebookDestroyer():
"""Destroyer class for Facebook integration.
"""
def __init__(self):
"""Initializer method"""
self.logger = None
def _unfriend(self):
"""Private method that takes a Facebook friend object and unfriends them."""
pass
def destroy(self):
"""Public method that implements the abstracted functionality of unfriending users"""
pass
|
Add basic Facebook service integration
|
Add basic Facebook service integration
|
Python
|
mit
|
jaredmichaelsmith/destroyer
|
Add basic Facebook service integration
|
"""facebook.py - Module for Facebook service functionality"""
import time
import click
class FacebookDestroyer():
"""Destroyer class for Facebook integration.
"""
def __init__(self):
"""Initializer method"""
self.logger = None
def _unfriend(self):
"""Private method that takes a Facebook friend object and unfriends them."""
pass
def destroy(self):
"""Public method that implements the abstracted functionality of unfriending users"""
pass
|
<commit_before><commit_msg>Add basic Facebook service integration<commit_after>
|
"""facebook.py - Module for Facebook service functionality"""
import time
import click
class FacebookDestroyer():
"""Destroyer class for Facebook integration.
"""
def __init__(self):
"""Initializer method"""
self.logger = None
def _unfriend(self):
"""Private method that takes a Facebook friend object and unfriends them."""
pass
def destroy(self):
"""Public method that implements the abstracted functionality of unfriending users"""
pass
|
Add basic Facebook service integration"""facebook.py - Module for Facebook service functionality"""
import time
import click
class FacebookDestroyer():
"""Destroyer class for Facebook integration.
"""
def __init__(self):
"""Initializer method"""
self.logger = None
def _unfriend(self):
"""Private method that takes a Facebook friend object and unfriends them."""
pass
def destroy(self):
"""Public method that implements the abstracted functionality of unfriending users"""
pass
|
<commit_before><commit_msg>Add basic Facebook service integration<commit_after>"""facebook.py - Module for Facebook service functionality"""
import time
import click
class FacebookDestroyer():
"""Destroyer class for Facebook integration.
"""
def __init__(self):
"""Initializer method"""
self.logger = None
def _unfriend(self):
"""Private method that takes a Facebook friend object and unfriends them."""
pass
def destroy(self):
"""Public method that implements the abstracted functionality of unfriending users"""
pass
|
|
8b12dba8c395f116cf3ac90dca7fafb27562371f
|
scripts/PetIBM/writePETScSolution.py
|
scripts/PetIBM/writePETScSolution.py
|
# file: createInitialPETScSolution.py
# author: Olivier Mesnard (mesnardo@gwu.edu)
# description: Creates the initial solution and write in a PETSc-readable format.
import sys
import os
import argparse
import numpy
sys.path.append(os.environ['SCRIPTS'])
from library import miscellaneous
def parse_command_line():
"""Parses the command-line."""
print('[info] parsing the command-line ...'),
# create parser
parser = argparse.ArgumentParser(description='Creates and writes initial fields '
'in PETSc-readable files',
formatter_class= argparse.ArgumentDefaultsHelpFormatter)
# fill parser with arguments
parser.add_argument('--directory', dest='directory',
type=str, default=os.getcwd(),
help='directory of the simulation')
# arguments about grid
parser.add_argument('--bottom-left', '-bl', dest='bottom_left',
type=float, nargs='+', default=[float('-inf'), float('-inf')],
help='coordinates of the bottom-left corner of the view')
parser.add_argument('--top-right', '-tr', dest='top_right',
type=float, nargs='+', default=[float('inf'), float('inf')],
help='coordinates of the top-right corner of the view')
parser.add_argument('--n', '-n', dest='n_cells',
type=int, nargs='+',
help='number of cells in each direction')
parser.add_argument('--periodic', dest='periodic_directions',
type=str, nargs='+',
default=[],
help='list of directions with periodic boundary conditions')
parser.add_argument('--solution', dest='solution',
type=str, nargs='+',
default=None,
help='class name followed by parameters required '
'to write the fields into PETSc-readable files')
# parse given options file
parser.add_argument('--options',
type=open, action=miscellaneous.ReadOptionsFromFile,
help='path of the file with options to parse')
print('done')
# parse command-line
return parser.parse_args()
def main():
"""Creates the initial velocity field on a staggered grid.
Converts the velocity components into fluxes.
Writes the fluxes and the pressure (zeros) into files.
"""
args = parse_command_line()
# create nodal stations along each direction
grid = [numpy.linspace(args.bottom_left[i], args.top_right[i], args.n_cells[i]+1)
for i in range(len(args.n_cells))]
from library.solutions.dispatcher import dispatcher
SolutionClass = dispatcher[args.solution[0]]
arguments = grid + args.solution[1:]
solution = SolutionClass(*arguments)
solution.write_fields_petsc_format(*arguments,
periodic_directions=args.periodic_directions,
directory=args.directory)
if __name__ == '__main__':
print('\n[{}] START\n'.format(os.path.basename(__file__)))
main()
print('\n[{}] END\n'.format(os.path.basename(__file__)))
|
Write the initial PetIBM readable solution in 0000000 sub-folder
|
Write the initial PetIBM readable solution in 0000000 sub-folder
|
Python
|
mit
|
mesnardo/snake
|
Write the initial PetIBM readable solution in 0000000 sub-folder
|
# file: createInitialPETScSolution.py
# author: Olivier Mesnard (mesnardo@gwu.edu)
# description: Creates the initial solution and write in a PETSc-readable format.
import sys
import os
import argparse
import numpy
sys.path.append(os.environ['SCRIPTS'])
from library import miscellaneous
def parse_command_line():
"""Parses the command-line."""
print('[info] parsing the command-line ...'),
# create parser
parser = argparse.ArgumentParser(description='Creates and writes initial fields '
'in PETSc-readable files',
formatter_class= argparse.ArgumentDefaultsHelpFormatter)
# fill parser with arguments
parser.add_argument('--directory', dest='directory',
type=str, default=os.getcwd(),
help='directory of the simulation')
# arguments about grid
parser.add_argument('--bottom-left', '-bl', dest='bottom_left',
type=float, nargs='+', default=[float('-inf'), float('-inf')],
help='coordinates of the bottom-left corner of the view')
parser.add_argument('--top-right', '-tr', dest='top_right',
type=float, nargs='+', default=[float('inf'), float('inf')],
help='coordinates of the top-right corner of the view')
parser.add_argument('--n', '-n', dest='n_cells',
type=int, nargs='+',
help='number of cells in each direction')
parser.add_argument('--periodic', dest='periodic_directions',
type=str, nargs='+',
default=[],
help='list of directions with periodic boundary conditions')
parser.add_argument('--solution', dest='solution',
type=str, nargs='+',
default=None,
help='class name followed by parameters required '
'to write the fields into PETSc-readable files')
# parse given options file
parser.add_argument('--options',
type=open, action=miscellaneous.ReadOptionsFromFile,
help='path of the file with options to parse')
print('done')
# parse command-line
return parser.parse_args()
def main():
"""Creates the initial velocity field on a staggered grid.
Converts the velocity components into fluxes.
Writes the fluxes and the pressure (zeros) into files.
"""
args = parse_command_line()
# create nodal stations along each direction
grid = [numpy.linspace(args.bottom_left[i], args.top_right[i], args.n_cells[i]+1)
for i in range(len(args.n_cells))]
from library.solutions.dispatcher import dispatcher
SolutionClass = dispatcher[args.solution[0]]
arguments = grid + args.solution[1:]
solution = SolutionClass(*arguments)
solution.write_fields_petsc_format(*arguments,
periodic_directions=args.periodic_directions,
directory=args.directory)
if __name__ == '__main__':
print('\n[{}] START\n'.format(os.path.basename(__file__)))
main()
print('\n[{}] END\n'.format(os.path.basename(__file__)))
|
<commit_before><commit_msg>Write the initial PetIBM readable solution in 0000000 sub-folder<commit_after>
|
# file: createInitialPETScSolution.py
# author: Olivier Mesnard (mesnardo@gwu.edu)
# description: Creates the initial solution and write in a PETSc-readable format.
import sys
import os
import argparse
import numpy
sys.path.append(os.environ['SCRIPTS'])
from library import miscellaneous
def parse_command_line():
"""Parses the command-line."""
print('[info] parsing the command-line ...'),
# create parser
parser = argparse.ArgumentParser(description='Creates and writes initial fields '
'in PETSc-readable files',
formatter_class= argparse.ArgumentDefaultsHelpFormatter)
# fill parser with arguments
parser.add_argument('--directory', dest='directory',
type=str, default=os.getcwd(),
help='directory of the simulation')
# arguments about grid
parser.add_argument('--bottom-left', '-bl', dest='bottom_left',
type=float, nargs='+', default=[float('-inf'), float('-inf')],
help='coordinates of the bottom-left corner of the view')
parser.add_argument('--top-right', '-tr', dest='top_right',
type=float, nargs='+', default=[float('inf'), float('inf')],
help='coordinates of the top-right corner of the view')
parser.add_argument('--n', '-n', dest='n_cells',
type=int, nargs='+',
help='number of cells in each direction')
parser.add_argument('--periodic', dest='periodic_directions',
type=str, nargs='+',
default=[],
help='list of directions with periodic boundary conditions')
parser.add_argument('--solution', dest='solution',
type=str, nargs='+',
default=None,
help='class name followed by parameters required '
'to write the fields into PETSc-readable files')
# parse given options file
parser.add_argument('--options',
type=open, action=miscellaneous.ReadOptionsFromFile,
help='path of the file with options to parse')
print('done')
# parse command-line
return parser.parse_args()
def main():
"""Creates the initial velocity field on a staggered grid.
Converts the velocity components into fluxes.
Writes the fluxes and the pressure (zeros) into files.
"""
args = parse_command_line()
# create nodal stations along each direction
grid = [numpy.linspace(args.bottom_left[i], args.top_right[i], args.n_cells[i]+1)
for i in range(len(args.n_cells))]
from library.solutions.dispatcher import dispatcher
SolutionClass = dispatcher[args.solution[0]]
arguments = grid + args.solution[1:]
solution = SolutionClass(*arguments)
solution.write_fields_petsc_format(*arguments,
periodic_directions=args.periodic_directions,
directory=args.directory)
if __name__ == '__main__':
print('\n[{}] START\n'.format(os.path.basename(__file__)))
main()
print('\n[{}] END\n'.format(os.path.basename(__file__)))
|
Write the initial PetIBM readable solution in 0000000 sub-folder# file: createInitialPETScSolution.py
# author: Olivier Mesnard (mesnardo@gwu.edu)
# description: Creates the initial solution and write in a PETSc-readable format.
import sys
import os
import argparse
import numpy
sys.path.append(os.environ['SCRIPTS'])
from library import miscellaneous
def parse_command_line():
"""Parses the command-line."""
print('[info] parsing the command-line ...'),
# create parser
parser = argparse.ArgumentParser(description='Creates and writes initial fields '
'in PETSc-readable files',
formatter_class= argparse.ArgumentDefaultsHelpFormatter)
# fill parser with arguments
parser.add_argument('--directory', dest='directory',
type=str, default=os.getcwd(),
help='directory of the simulation')
# arguments about grid
parser.add_argument('--bottom-left', '-bl', dest='bottom_left',
type=float, nargs='+', default=[float('-inf'), float('-inf')],
help='coordinates of the bottom-left corner of the view')
parser.add_argument('--top-right', '-tr', dest='top_right',
type=float, nargs='+', default=[float('inf'), float('inf')],
help='coordinates of the top-right corner of the view')
parser.add_argument('--n', '-n', dest='n_cells',
type=int, nargs='+',
help='number of cells in each direction')
parser.add_argument('--periodic', dest='periodic_directions',
type=str, nargs='+',
default=[],
help='list of directions with periodic boundary conditions')
parser.add_argument('--solution', dest='solution',
type=str, nargs='+',
default=None,
help='class name followed by parameters required '
'to write the fields into PETSc-readable files')
# parse given options file
parser.add_argument('--options',
type=open, action=miscellaneous.ReadOptionsFromFile,
help='path of the file with options to parse')
print('done')
# parse command-line
return parser.parse_args()
def main():
"""Creates the initial velocity field on a staggered grid.
Converts the velocity components into fluxes.
Writes the fluxes and the pressure (zeros) into files.
"""
args = parse_command_line()
# create nodal stations along each direction
grid = [numpy.linspace(args.bottom_left[i], args.top_right[i], args.n_cells[i]+1)
for i in range(len(args.n_cells))]
from library.solutions.dispatcher import dispatcher
SolutionClass = dispatcher[args.solution[0]]
arguments = grid + args.solution[1:]
solution = SolutionClass(*arguments)
solution.write_fields_petsc_format(*arguments,
periodic_directions=args.periodic_directions,
directory=args.directory)
if __name__ == '__main__':
print('\n[{}] START\n'.format(os.path.basename(__file__)))
main()
print('\n[{}] END\n'.format(os.path.basename(__file__)))
|
<commit_before><commit_msg>Write the initial PetIBM readable solution in 0000000 sub-folder<commit_after># file: createInitialPETScSolution.py
# author: Olivier Mesnard (mesnardo@gwu.edu)
# description: Creates the initial solution and write in a PETSc-readable format.
import sys
import os
import argparse
import numpy
sys.path.append(os.environ['SCRIPTS'])
from library import miscellaneous
def parse_command_line():
"""Parses the command-line."""
print('[info] parsing the command-line ...'),
# create parser
parser = argparse.ArgumentParser(description='Creates and writes initial fields '
'in PETSc-readable files',
formatter_class= argparse.ArgumentDefaultsHelpFormatter)
# fill parser with arguments
parser.add_argument('--directory', dest='directory',
type=str, default=os.getcwd(),
help='directory of the simulation')
# arguments about grid
parser.add_argument('--bottom-left', '-bl', dest='bottom_left',
type=float, nargs='+', default=[float('-inf'), float('-inf')],
help='coordinates of the bottom-left corner of the view')
parser.add_argument('--top-right', '-tr', dest='top_right',
type=float, nargs='+', default=[float('inf'), float('inf')],
help='coordinates of the top-right corner of the view')
parser.add_argument('--n', '-n', dest='n_cells',
type=int, nargs='+',
help='number of cells in each direction')
parser.add_argument('--periodic', dest='periodic_directions',
type=str, nargs='+',
default=[],
help='list of directions with periodic boundary conditions')
parser.add_argument('--solution', dest='solution',
type=str, nargs='+',
default=None,
help='class name followed by parameters required '
'to write the fields into PETSc-readable files')
# parse given options file
parser.add_argument('--options',
type=open, action=miscellaneous.ReadOptionsFromFile,
help='path of the file with options to parse')
print('done')
# parse command-line
return parser.parse_args()
def main():
"""Creates the initial velocity field on a staggered grid.
Converts the velocity components into fluxes.
Writes the fluxes and the pressure (zeros) into files.
"""
args = parse_command_line()
# create nodal stations along each direction
grid = [numpy.linspace(args.bottom_left[i], args.top_right[i], args.n_cells[i]+1)
for i in range(len(args.n_cells))]
from library.solutions.dispatcher import dispatcher
SolutionClass = dispatcher[args.solution[0]]
arguments = grid + args.solution[1:]
solution = SolutionClass(*arguments)
solution.write_fields_petsc_format(*arguments,
periodic_directions=args.periodic_directions,
directory=args.directory)
if __name__ == '__main__':
print('\n[{}] START\n'.format(os.path.basename(__file__)))
main()
print('\n[{}] END\n'.format(os.path.basename(__file__)))
|
|
8bd853402f532932f55d7e876b320c70155625af
|
nntools/tests/test_init.py
|
nntools/tests/test_init.py
|
def test_shape():
from nntools.init import Initializer
# Assert that all `Initializer` sublasses return the shape that
# we've asked for in `sample`:
for klass in Initializer.__subclasses__():
assert klass().sample((12, 23)).shape == (12, 23)
def test_normal():
from nntools.init import Normal
sample = Normal().sample((100, 200))
assert -0.001 < sample.mean() < 0.001
def test_constant():
from nntools.init import Constant
sample = Constant(1.0).sample((10, 20))
assert (sample == 1.0).all()
def test_sparse():
from nntools.init import Sparse
sample = Sparse(sparsity=0.5).sample((10, 20))
assert (sample == 0.0).sum() == (sample != 0.0).sum()
assert (sample == 0.0).sum() == (10 * 20) / 2
def test_uniform_glorot():
from nntools.init import Uniform
sample = Uniform().sample((150, 450))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_glorot_receptive_field():
from nntools.init import Uniform
sample = Uniform().sample((150, 150, 2))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_range_as_number():
from nntools.init import Uniform
sample = Uniform(1.0).sample((300, 400))
assert sample.shape == (300, 400)
assert -1.1 < sample.min() < -0.9
assert 0.9 < sample.max() < 1.1
def test_uniform_range_as_range():
from nntools.init import Uniform
sample = Uniform((0.0, 1.0)).sample((300, 400))
assert sample.shape == (300, 400)
assert -0.1 < sample.min() < 0.1
assert 0.9 < sample.max() < 1.1
|
Add unit tests for nntools.init.
|
Add unit tests for nntools.init.
|
Python
|
mit
|
DataFighter/Lasagne-tutorial,MihailoIsakov/fishfish,Richi91/nntools,rlkelly/Lasagne,gabortakacs/Lasagne,cancan101/Lasagne,dnouri/Lasagne,DeanChan/Lasagne,ebattenberg/Lasagne,ErnstHowie/Lasagne,diogo149/Lasagne,dnuffer/Lasagne,kracwarlock/Lasagne,takacsg84/Lasagne,mheilman/Lasagne,317070/nntools
|
Add unit tests for nntools.init.
|
def test_shape():
from nntools.init import Initializer
# Assert that all `Initializer` sublasses return the shape that
# we've asked for in `sample`:
for klass in Initializer.__subclasses__():
assert klass().sample((12, 23)).shape == (12, 23)
def test_normal():
from nntools.init import Normal
sample = Normal().sample((100, 200))
assert -0.001 < sample.mean() < 0.001
def test_constant():
from nntools.init import Constant
sample = Constant(1.0).sample((10, 20))
assert (sample == 1.0).all()
def test_sparse():
from nntools.init import Sparse
sample = Sparse(sparsity=0.5).sample((10, 20))
assert (sample == 0.0).sum() == (sample != 0.0).sum()
assert (sample == 0.0).sum() == (10 * 20) / 2
def test_uniform_glorot():
from nntools.init import Uniform
sample = Uniform().sample((150, 450))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_glorot_receptive_field():
from nntools.init import Uniform
sample = Uniform().sample((150, 150, 2))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_range_as_number():
from nntools.init import Uniform
sample = Uniform(1.0).sample((300, 400))
assert sample.shape == (300, 400)
assert -1.1 < sample.min() < -0.9
assert 0.9 < sample.max() < 1.1
def test_uniform_range_as_range():
from nntools.init import Uniform
sample = Uniform((0.0, 1.0)).sample((300, 400))
assert sample.shape == (300, 400)
assert -0.1 < sample.min() < 0.1
assert 0.9 < sample.max() < 1.1
|
<commit_before><commit_msg>Add unit tests for nntools.init.<commit_after>
|
def test_shape():
from nntools.init import Initializer
# Assert that all `Initializer` sublasses return the shape that
# we've asked for in `sample`:
for klass in Initializer.__subclasses__():
assert klass().sample((12, 23)).shape == (12, 23)
def test_normal():
from nntools.init import Normal
sample = Normal().sample((100, 200))
assert -0.001 < sample.mean() < 0.001
def test_constant():
from nntools.init import Constant
sample = Constant(1.0).sample((10, 20))
assert (sample == 1.0).all()
def test_sparse():
from nntools.init import Sparse
sample = Sparse(sparsity=0.5).sample((10, 20))
assert (sample == 0.0).sum() == (sample != 0.0).sum()
assert (sample == 0.0).sum() == (10 * 20) / 2
def test_uniform_glorot():
from nntools.init import Uniform
sample = Uniform().sample((150, 450))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_glorot_receptive_field():
from nntools.init import Uniform
sample = Uniform().sample((150, 150, 2))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_range_as_number():
from nntools.init import Uniform
sample = Uniform(1.0).sample((300, 400))
assert sample.shape == (300, 400)
assert -1.1 < sample.min() < -0.9
assert 0.9 < sample.max() < 1.1
def test_uniform_range_as_range():
from nntools.init import Uniform
sample = Uniform((0.0, 1.0)).sample((300, 400))
assert sample.shape == (300, 400)
assert -0.1 < sample.min() < 0.1
assert 0.9 < sample.max() < 1.1
|
Add unit tests for nntools.init.def test_shape():
from nntools.init import Initializer
# Assert that all `Initializer` sublasses return the shape that
# we've asked for in `sample`:
for klass in Initializer.__subclasses__():
assert klass().sample((12, 23)).shape == (12, 23)
def test_normal():
from nntools.init import Normal
sample = Normal().sample((100, 200))
assert -0.001 < sample.mean() < 0.001
def test_constant():
from nntools.init import Constant
sample = Constant(1.0).sample((10, 20))
assert (sample == 1.0).all()
def test_sparse():
from nntools.init import Sparse
sample = Sparse(sparsity=0.5).sample((10, 20))
assert (sample == 0.0).sum() == (sample != 0.0).sum()
assert (sample == 0.0).sum() == (10 * 20) / 2
def test_uniform_glorot():
from nntools.init import Uniform
sample = Uniform().sample((150, 450))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_glorot_receptive_field():
from nntools.init import Uniform
sample = Uniform().sample((150, 150, 2))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_range_as_number():
from nntools.init import Uniform
sample = Uniform(1.0).sample((300, 400))
assert sample.shape == (300, 400)
assert -1.1 < sample.min() < -0.9
assert 0.9 < sample.max() < 1.1
def test_uniform_range_as_range():
from nntools.init import Uniform
sample = Uniform((0.0, 1.0)).sample((300, 400))
assert sample.shape == (300, 400)
assert -0.1 < sample.min() < 0.1
assert 0.9 < sample.max() < 1.1
|
<commit_before><commit_msg>Add unit tests for nntools.init.<commit_after>def test_shape():
from nntools.init import Initializer
# Assert that all `Initializer` sublasses return the shape that
# we've asked for in `sample`:
for klass in Initializer.__subclasses__():
assert klass().sample((12, 23)).shape == (12, 23)
def test_normal():
from nntools.init import Normal
sample = Normal().sample((100, 200))
assert -0.001 < sample.mean() < 0.001
def test_constant():
from nntools.init import Constant
sample = Constant(1.0).sample((10, 20))
assert (sample == 1.0).all()
def test_sparse():
from nntools.init import Sparse
sample = Sparse(sparsity=0.5).sample((10, 20))
assert (sample == 0.0).sum() == (sample != 0.0).sum()
assert (sample == 0.0).sum() == (10 * 20) / 2
def test_uniform_glorot():
from nntools.init import Uniform
sample = Uniform().sample((150, 450))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_glorot_receptive_field():
from nntools.init import Uniform
sample = Uniform().sample((150, 150, 2))
assert -0.11 < sample.min() < -0.09
assert 0.09 < sample.max() < 0.11
def test_uniform_range_as_number():
from nntools.init import Uniform
sample = Uniform(1.0).sample((300, 400))
assert sample.shape == (300, 400)
assert -1.1 < sample.min() < -0.9
assert 0.9 < sample.max() < 1.1
def test_uniform_range_as_range():
from nntools.init import Uniform
sample = Uniform((0.0, 1.0)).sample((300, 400))
assert sample.shape == (300, 400)
assert -0.1 < sample.min() < 0.1
assert 0.9 < sample.max() < 1.1
|
|
25c58b40bd66a421796f2696b9c0e44ef5d19c7a
|
tournamentcontrol/competition/migrations/0021_competition__data__person_uuid.py
|
tournamentcontrol/competition/migrations/0021_competition__data__person_uuid.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-17 20:19
from __future__ import unicode_literals
from django.db import migrations
def link_person_model(apps, schema_editor):
Person = apps.get_model('competition', 'Person')
for person in Person.objects.all():
person.clubassociation_set.update(person_uuid=person.uuid)
person.teamassociation_set.update(person_uuid=person.uuid)
person.seasonassociation_set.update(person_uuid=person.uuid)
person.statistics.update(player_uuid=person.uuid)
class Migration(migrations.Migration):
dependencies = [
('competition', '0020_competition__add__person_uuid'),
]
operations = [
migrations.RunPython(
link_person_model, reverse_code=migrations.RunPython.noop),
]
|
Add data migration to build ForeignKey on Person.uuid field
|
Add data migration to build ForeignKey on Person.uuid field
|
Python
|
bsd-3-clause
|
goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic
|
Add data migration to build ForeignKey on Person.uuid field
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-17 20:19
from __future__ import unicode_literals
from django.db import migrations
def link_person_model(apps, schema_editor):
Person = apps.get_model('competition', 'Person')
for person in Person.objects.all():
person.clubassociation_set.update(person_uuid=person.uuid)
person.teamassociation_set.update(person_uuid=person.uuid)
person.seasonassociation_set.update(person_uuid=person.uuid)
person.statistics.update(player_uuid=person.uuid)
class Migration(migrations.Migration):
dependencies = [
('competition', '0020_competition__add__person_uuid'),
]
operations = [
migrations.RunPython(
link_person_model, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add data migration to build ForeignKey on Person.uuid field<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-17 20:19
from __future__ import unicode_literals
from django.db import migrations
def link_person_model(apps, schema_editor):
Person = apps.get_model('competition', 'Person')
for person in Person.objects.all():
person.clubassociation_set.update(person_uuid=person.uuid)
person.teamassociation_set.update(person_uuid=person.uuid)
person.seasonassociation_set.update(person_uuid=person.uuid)
person.statistics.update(player_uuid=person.uuid)
class Migration(migrations.Migration):
dependencies = [
('competition', '0020_competition__add__person_uuid'),
]
operations = [
migrations.RunPython(
link_person_model, reverse_code=migrations.RunPython.noop),
]
|
Add data migration to build ForeignKey on Person.uuid field# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-17 20:19
from __future__ import unicode_literals
from django.db import migrations
def link_person_model(apps, schema_editor):
Person = apps.get_model('competition', 'Person')
for person in Person.objects.all():
person.clubassociation_set.update(person_uuid=person.uuid)
person.teamassociation_set.update(person_uuid=person.uuid)
person.seasonassociation_set.update(person_uuid=person.uuid)
person.statistics.update(player_uuid=person.uuid)
class Migration(migrations.Migration):
dependencies = [
('competition', '0020_competition__add__person_uuid'),
]
operations = [
migrations.RunPython(
link_person_model, reverse_code=migrations.RunPython.noop),
]
|
<commit_before><commit_msg>Add data migration to build ForeignKey on Person.uuid field<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-17 20:19
from __future__ import unicode_literals
from django.db import migrations
def link_person_model(apps, schema_editor):
Person = apps.get_model('competition', 'Person')
for person in Person.objects.all():
person.clubassociation_set.update(person_uuid=person.uuid)
person.teamassociation_set.update(person_uuid=person.uuid)
person.seasonassociation_set.update(person_uuid=person.uuid)
person.statistics.update(player_uuid=person.uuid)
class Migration(migrations.Migration):
dependencies = [
('competition', '0020_competition__add__person_uuid'),
]
operations = [
migrations.RunPython(
link_person_model, reverse_code=migrations.RunPython.noop),
]
|
|
33637fcc457e070fd946b53f3da8f2b90ee039fa
|
03testPgCRUD.py
|
03testPgCRUD.py
|
'''
Created on Aug 5, 2010
@author: apm
'''
#Test CRUD on opengemdb database
#CREATE : Create testTable
# : Insert rows
#READ : Read rows
#UPDATE : Change row values
#DELETE : Delete Element
# : Drop Table
#!/usr/bin/python
import psycopg2
#note that we have to import the Psycopg2 extras library!
import sys
import pprint
def main():
#start of script
#Define our connection string
conn_string = "host='gemsun01.ethz.ch' dbname='opengemdb' user='gemuser' password='gem4321'"
# print the connection string we will use to connect
print "Connecting to database\n ->%s" % (conn_string)
try:
# get a connection, if a connect cannot be made an exception will be raised here
conn = psycopg2.connect(conn_string)
print "Connected!\n"
# conn.cursor will return a cursor oject, you can use this cursor to perform queries
cursor = conn.cursor()
# create a Table
cursor.execute("CREATE TABLE testTable (name char(8), value float)")
# insert rows into table
cursor.execute("INSERT INTO testTable VALUES('one', 1.0)")
cursor.execute("INSERT INTO testTable VALUES('two', 2.0)")
cursor.execute("INSERT INTO testTable VALUES('three', 3.0)")
# makes changes permanent
conn.commit()
# read rows in table and print
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
for i in range(len(rows)):
print "Row:", i, "name:", rows[i][0], "value:", rows[i][1]
# print out the records using pretty print
print "Print rows using pretty print"
pprint.pprint(rows)
# delete a row
print "delete row three"
cursor.execute("DELETE FROM testTable where name='three'")
conn.commit()
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
print "Print rows with row 3 deleted using pretty print"
pprint.pprint(rows)
print "Dropping table testTable"
cursor.execute("DROP TABLE testTable")
conn.commit()
print "End CRUD Test with Python, DB API 2.0 with db adapter psycopg2!"
except:
# Get the most recent exception
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
# Exit the script and print an error telling what happened.
sys.exit("Database connection failed!\n ->%s" % (exceptionValue))
if __name__ == "__main__":
sys.exit(main())
|
Test Python-Postgresql CRUD using psycopg2 driver
|
Test Python-Postgresql CRUD using psycopg2 driver
|
Python
|
agpl-3.0
|
gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine
|
Test Python-Postgresql CRUD using psycopg2 driver
|
'''
Created on Aug 5, 2010
@author: apm
'''
#Test CRUD on opengemdb database
#CREATE : Create testTable
# : Insert rows
#READ : Read rows
#UPDATE : Change row values
#DELETE : Delete Element
# : Drop Table
#!/usr/bin/python
import psycopg2
#note that we have to import the Psycopg2 extras library!
import sys
import pprint
def main():
#start of script
#Define our connection string
conn_string = "host='gemsun01.ethz.ch' dbname='opengemdb' user='gemuser' password='gem4321'"
# print the connection string we will use to connect
print "Connecting to database\n ->%s" % (conn_string)
try:
# get a connection, if a connect cannot be made an exception will be raised here
conn = psycopg2.connect(conn_string)
print "Connected!\n"
# conn.cursor will return a cursor oject, you can use this cursor to perform queries
cursor = conn.cursor()
# create a Table
cursor.execute("CREATE TABLE testTable (name char(8), value float)")
# insert rows into table
cursor.execute("INSERT INTO testTable VALUES('one', 1.0)")
cursor.execute("INSERT INTO testTable VALUES('two', 2.0)")
cursor.execute("INSERT INTO testTable VALUES('three', 3.0)")
# makes changes permanent
conn.commit()
# read rows in table and print
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
for i in range(len(rows)):
print "Row:", i, "name:", rows[i][0], "value:", rows[i][1]
# print out the records using pretty print
print "Print rows using pretty print"
pprint.pprint(rows)
# delete a row
print "delete row three"
cursor.execute("DELETE FROM testTable where name='three'")
conn.commit()
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
print "Print rows with row 3 deleted using pretty print"
pprint.pprint(rows)
print "Dropping table testTable"
cursor.execute("DROP TABLE testTable")
conn.commit()
print "End CRUD Test with Python, DB API 2.0 with db adapter psycopg2!"
except:
# Get the most recent exception
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
# Exit the script and print an error telling what happened.
sys.exit("Database connection failed!\n ->%s" % (exceptionValue))
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Test Python-Postgresql CRUD using psycopg2 driver<commit_after>
|
'''
Created on Aug 5, 2010
@author: apm
'''
#Test CRUD on opengemdb database
#CREATE : Create testTable
# : Insert rows
#READ : Read rows
#UPDATE : Change row values
#DELETE : Delete Element
# : Drop Table
#!/usr/bin/python
import psycopg2
#note that we have to import the Psycopg2 extras library!
import sys
import pprint
def main():
#start of script
#Define our connection string
conn_string = "host='gemsun01.ethz.ch' dbname='opengemdb' user='gemuser' password='gem4321'"
# print the connection string we will use to connect
print "Connecting to database\n ->%s" % (conn_string)
try:
# get a connection, if a connect cannot be made an exception will be raised here
conn = psycopg2.connect(conn_string)
print "Connected!\n"
# conn.cursor will return a cursor oject, you can use this cursor to perform queries
cursor = conn.cursor()
# create a Table
cursor.execute("CREATE TABLE testTable (name char(8), value float)")
# insert rows into table
cursor.execute("INSERT INTO testTable VALUES('one', 1.0)")
cursor.execute("INSERT INTO testTable VALUES('two', 2.0)")
cursor.execute("INSERT INTO testTable VALUES('three', 3.0)")
# makes changes permanent
conn.commit()
# read rows in table and print
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
for i in range(len(rows)):
print "Row:", i, "name:", rows[i][0], "value:", rows[i][1]
# print out the records using pretty print
print "Print rows using pretty print"
pprint.pprint(rows)
# delete a row
print "delete row three"
cursor.execute("DELETE FROM testTable where name='three'")
conn.commit()
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
print "Print rows with row 3 deleted using pretty print"
pprint.pprint(rows)
print "Dropping table testTable"
cursor.execute("DROP TABLE testTable")
conn.commit()
print "End CRUD Test with Python, DB API 2.0 with db adapter psycopg2!"
except:
# Get the most recent exception
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
# Exit the script and print an error telling what happened.
sys.exit("Database connection failed!\n ->%s" % (exceptionValue))
if __name__ == "__main__":
sys.exit(main())
|
Test Python-Postgresql CRUD using psycopg2 driver'''
Created on Aug 5, 2010
@author: apm
'''
#Test CRUD on opengemdb database
#CREATE : Create testTable
# : Insert rows
#READ : Read rows
#UPDATE : Change row values
#DELETE : Delete Element
# : Drop Table
#!/usr/bin/python
import psycopg2
#note that we have to import the Psycopg2 extras library!
import sys
import pprint
def main():
#start of script
#Define our connection string
conn_string = "host='gemsun01.ethz.ch' dbname='opengemdb' user='gemuser' password='gem4321'"
# print the connection string we will use to connect
print "Connecting to database\n ->%s" % (conn_string)
try:
# get a connection, if a connect cannot be made an exception will be raised here
conn = psycopg2.connect(conn_string)
print "Connected!\n"
# conn.cursor will return a cursor oject, you can use this cursor to perform queries
cursor = conn.cursor()
# create a Table
cursor.execute("CREATE TABLE testTable (name char(8), value float)")
# insert rows into table
cursor.execute("INSERT INTO testTable VALUES('one', 1.0)")
cursor.execute("INSERT INTO testTable VALUES('two', 2.0)")
cursor.execute("INSERT INTO testTable VALUES('three', 3.0)")
# makes changes permanent
conn.commit()
# read rows in table and print
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
for i in range(len(rows)):
print "Row:", i, "name:", rows[i][0], "value:", rows[i][1]
# print out the records using pretty print
print "Print rows using pretty print"
pprint.pprint(rows)
# delete a row
print "delete row three"
cursor.execute("DELETE FROM testTable where name='three'")
conn.commit()
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
print "Print rows with row 3 deleted using pretty print"
pprint.pprint(rows)
print "Dropping table testTable"
cursor.execute("DROP TABLE testTable")
conn.commit()
print "End CRUD Test with Python, DB API 2.0 with db adapter psycopg2!"
except:
# Get the most recent exception
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
# Exit the script and print an error telling what happened.
sys.exit("Database connection failed!\n ->%s" % (exceptionValue))
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Test Python-Postgresql CRUD using psycopg2 driver<commit_after>'''
Created on Aug 5, 2010
@author: apm
'''
#Test CRUD on opengemdb database
#CREATE : Create testTable
# : Insert rows
#READ : Read rows
#UPDATE : Change row values
#DELETE : Delete Element
# : Drop Table
#!/usr/bin/python
import psycopg2
#note that we have to import the Psycopg2 extras library!
import sys
import pprint
def main():
#start of script
#Define our connection string
conn_string = "host='gemsun01.ethz.ch' dbname='opengemdb' user='gemuser' password='gem4321'"
# print the connection string we will use to connect
print "Connecting to database\n ->%s" % (conn_string)
try:
# get a connection, if a connect cannot be made an exception will be raised here
conn = psycopg2.connect(conn_string)
print "Connected!\n"
# conn.cursor will return a cursor oject, you can use this cursor to perform queries
cursor = conn.cursor()
# create a Table
cursor.execute("CREATE TABLE testTable (name char(8), value float)")
# insert rows into table
cursor.execute("INSERT INTO testTable VALUES('one', 1.0)")
cursor.execute("INSERT INTO testTable VALUES('two', 2.0)")
cursor.execute("INSERT INTO testTable VALUES('three', 3.0)")
# makes changes permanent
conn.commit()
# read rows in table and print
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
for i in range(len(rows)):
print "Row:", i, "name:", rows[i][0], "value:", rows[i][1]
# print out the records using pretty print
print "Print rows using pretty print"
pprint.pprint(rows)
# delete a row
print "delete row three"
cursor.execute("DELETE FROM testTable where name='three'")
conn.commit()
cursor.execute("SELECT * FROM testTable")
rows = cursor.fetchall()
print "Print rows with row 3 deleted using pretty print"
pprint.pprint(rows)
print "Dropping table testTable"
cursor.execute("DROP TABLE testTable")
conn.commit()
print "End CRUD Test with Python, DB API 2.0 with db adapter psycopg2!"
except:
# Get the most recent exception
exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
# Exit the script and print an error telling what happened.
sys.exit("Database connection failed!\n ->%s" % (exceptionValue))
if __name__ == "__main__":
sys.exit(main())
|
|
85a027102cff16fc15bd8d959c23781582506b67
|
eppy/tests/test_parse_error.py
|
eppy/tests/test_parse_error.py
|
import os
import shutil
import sys
from six import StringIO
from eppy.runner.run_functions import parse_error, EnergyPlusRunError
def test_capture_stderr():
tmp_out = StringIO()
sys.stderr = tmp_out
sys.stderr.write("I am in stderr")
msg = parse_error(tmp_out, "C:/notafile")
assert "<File not found>" in msg
assert "I am in stderr" in msg
sys.stderr = sys.__stderr__
def test_capture_real_error(test_idf):
test_idf.newidfobject(
"HVACTemplate:Thermostat",
Name="thermostat VRF",
Heating_Setpoint_Schedule_Name=15,
Constant_Cooling_Setpoint=25,
)
rundir = "test_capture_real_error"
os.mkdir(rundir)
try:
test_idf.run(output_directory=rundir)
except EnergyPlusRunError as e:
assert "invalid Heating Setpoint Temperature Schedule" in str(e)
finally:
shutil.rmtree(rundir)
|
Test for an error that is still raised
|
Test for an error that is still raised
This is needed since we now automatically expand objects if required.
|
Python
|
mit
|
santoshphilip/eppy,santoshphilip/eppy,santoshphilip/eppy
|
Test for an error that is still raised
This is needed since we now automatically expand objects if required.
|
import os
import shutil
import sys
from six import StringIO
from eppy.runner.run_functions import parse_error, EnergyPlusRunError
def test_capture_stderr():
tmp_out = StringIO()
sys.stderr = tmp_out
sys.stderr.write("I am in stderr")
msg = parse_error(tmp_out, "C:/notafile")
assert "<File not found>" in msg
assert "I am in stderr" in msg
sys.stderr = sys.__stderr__
def test_capture_real_error(test_idf):
test_idf.newidfobject(
"HVACTemplate:Thermostat",
Name="thermostat VRF",
Heating_Setpoint_Schedule_Name=15,
Constant_Cooling_Setpoint=25,
)
rundir = "test_capture_real_error"
os.mkdir(rundir)
try:
test_idf.run(output_directory=rundir)
except EnergyPlusRunError as e:
assert "invalid Heating Setpoint Temperature Schedule" in str(e)
finally:
shutil.rmtree(rundir)
|
<commit_before><commit_msg>Test for an error that is still raised
This is needed since we now automatically expand objects if required.<commit_after>
|
import os
import shutil
import sys
from six import StringIO
from eppy.runner.run_functions import parse_error, EnergyPlusRunError
def test_capture_stderr():
tmp_out = StringIO()
sys.stderr = tmp_out
sys.stderr.write("I am in stderr")
msg = parse_error(tmp_out, "C:/notafile")
assert "<File not found>" in msg
assert "I am in stderr" in msg
sys.stderr = sys.__stderr__
def test_capture_real_error(test_idf):
test_idf.newidfobject(
"HVACTemplate:Thermostat",
Name="thermostat VRF",
Heating_Setpoint_Schedule_Name=15,
Constant_Cooling_Setpoint=25,
)
rundir = "test_capture_real_error"
os.mkdir(rundir)
try:
test_idf.run(output_directory=rundir)
except EnergyPlusRunError as e:
assert "invalid Heating Setpoint Temperature Schedule" in str(e)
finally:
shutil.rmtree(rundir)
|
Test for an error that is still raised
This is needed since we now automatically expand objects if required.
import os
import shutil
import sys
from six import StringIO
from eppy.runner.run_functions import parse_error, EnergyPlusRunError
def test_capture_stderr():
tmp_out = StringIO()
sys.stderr = tmp_out
sys.stderr.write("I am in stderr")
msg = parse_error(tmp_out, "C:/notafile")
assert "<File not found>" in msg
assert "I am in stderr" in msg
sys.stderr = sys.__stderr__
def test_capture_real_error(test_idf):
test_idf.newidfobject(
"HVACTemplate:Thermostat",
Name="thermostat VRF",
Heating_Setpoint_Schedule_Name=15,
Constant_Cooling_Setpoint=25,
)
rundir = "test_capture_real_error"
os.mkdir(rundir)
try:
test_idf.run(output_directory=rundir)
except EnergyPlusRunError as e:
assert "invalid Heating Setpoint Temperature Schedule" in str(e)
finally:
shutil.rmtree(rundir)
|
<commit_before><commit_msg>Test for an error that is still raised
This is needed since we now automatically expand objects if required.<commit_after>
import os
import shutil
import sys
from six import StringIO
from eppy.runner.run_functions import parse_error, EnergyPlusRunError
def test_capture_stderr():
tmp_out = StringIO()
sys.stderr = tmp_out
sys.stderr.write("I am in stderr")
msg = parse_error(tmp_out, "C:/notafile")
assert "<File not found>" in msg
assert "I am in stderr" in msg
sys.stderr = sys.__stderr__
def test_capture_real_error(test_idf):
test_idf.newidfobject(
"HVACTemplate:Thermostat",
Name="thermostat VRF",
Heating_Setpoint_Schedule_Name=15,
Constant_Cooling_Setpoint=25,
)
rundir = "test_capture_real_error"
os.mkdir(rundir)
try:
test_idf.run(output_directory=rundir)
except EnergyPlusRunError as e:
assert "invalid Heating Setpoint Temperature Schedule" in str(e)
finally:
shutil.rmtree(rundir)
|
|
d3663a7e2bdc6b20d043b55c7fc4f29b7880242d
|
holmes/migrations/versions/7f4a3b8c55d_idx_requests_by_status_completed.py
|
holmes/migrations/versions/7f4a3b8c55d_idx_requests_by_status_completed.py
|
"""idx requests by status, completed
Revision ID: 7f4a3b8c55d
Revises: 4b96dd9974bb
Create Date: 2014-03-31 17:39:40.858182
"""
# revision identifiers, used by Alembic.
revision = '7f4a3b8c55d'
down_revision = '4b96dd9974bb'
from alembic import op
def upgrade():
op.create_index('idx_status_complete', 'requests', ['status_code', 'completed_date'])
def downgrade():
op.drop_index('idx_status_complete', 'requests')
|
Speed up request count by status in period of days
|
Speed up request count by status in period of days
|
Python
|
mit
|
holmes-app/holmes-api,holmes-app/holmes-api
|
Speed up request count by status in period of days
|
"""idx requests by status, completed
Revision ID: 7f4a3b8c55d
Revises: 4b96dd9974bb
Create Date: 2014-03-31 17:39:40.858182
"""
# revision identifiers, used by Alembic.
revision = '7f4a3b8c55d'
down_revision = '4b96dd9974bb'
from alembic import op
def upgrade():
op.create_index('idx_status_complete', 'requests', ['status_code', 'completed_date'])
def downgrade():
op.drop_index('idx_status_complete', 'requests')
|
<commit_before><commit_msg>Speed up request count by status in period of days<commit_after>
|
"""idx requests by status, completed
Revision ID: 7f4a3b8c55d
Revises: 4b96dd9974bb
Create Date: 2014-03-31 17:39:40.858182
"""
# revision identifiers, used by Alembic.
revision = '7f4a3b8c55d'
down_revision = '4b96dd9974bb'
from alembic import op
def upgrade():
op.create_index('idx_status_complete', 'requests', ['status_code', 'completed_date'])
def downgrade():
op.drop_index('idx_status_complete', 'requests')
|
Speed up request count by status in period of days"""idx requests by status, completed
Revision ID: 7f4a3b8c55d
Revises: 4b96dd9974bb
Create Date: 2014-03-31 17:39:40.858182
"""
# revision identifiers, used by Alembic.
revision = '7f4a3b8c55d'
down_revision = '4b96dd9974bb'
from alembic import op
def upgrade():
op.create_index('idx_status_complete', 'requests', ['status_code', 'completed_date'])
def downgrade():
op.drop_index('idx_status_complete', 'requests')
|
<commit_before><commit_msg>Speed up request count by status in period of days<commit_after>"""idx requests by status, completed
Revision ID: 7f4a3b8c55d
Revises: 4b96dd9974bb
Create Date: 2014-03-31 17:39:40.858182
"""
# revision identifiers, used by Alembic.
revision = '7f4a3b8c55d'
down_revision = '4b96dd9974bb'
from alembic import op
def upgrade():
op.create_index('idx_status_complete', 'requests', ['status_code', 'completed_date'])
def downgrade():
op.drop_index('idx_status_complete', 'requests')
|
|
0d243fbb4eeb9f1065a3f29fa00b44e1349cb946
|
vpr/tests/migrate/shell_migrate.py
|
vpr/tests/migrate/shell_migrate.py
|
from django.db import connection
from vpr_content import models
def removeDuplicatedTitleInMaterial():
cur = connection.cursor()
qr0 = 'select id from vpr_content_material'
qr1 = 'select text from vpr_content_material where id=%d'
qr2 = 'update vpr_content_material set text=\'%s\' where id=%d'
pt0 = '<div class="title">'
pt1 = '</div>'
cur.execute(qr0)
mids = cur.fetchall()
for mid in mids:
try:
mid = mid[0]
cur.execute(qr1 % mid)
text = cur.fetchone()[0]
p0 = text.find(pt0)
p1 = text.find(pt1, p0)
text = text[:p0] + text[p1+len(pt1)+1:]
material = models.Material.objects.get(pk=mid)
material.text = text
material.save()
print mid
except:
raise
print 'Updating failed at ' + str(mid)
|
Add migrate on shell script
|
Add migrate on shell script
|
Python
|
agpl-3.0
|
voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo
|
Add migrate on shell script
|
from django.db import connection
from vpr_content import models
def removeDuplicatedTitleInMaterial():
cur = connection.cursor()
qr0 = 'select id from vpr_content_material'
qr1 = 'select text from vpr_content_material where id=%d'
qr2 = 'update vpr_content_material set text=\'%s\' where id=%d'
pt0 = '<div class="title">'
pt1 = '</div>'
cur.execute(qr0)
mids = cur.fetchall()
for mid in mids:
try:
mid = mid[0]
cur.execute(qr1 % mid)
text = cur.fetchone()[0]
p0 = text.find(pt0)
p1 = text.find(pt1, p0)
text = text[:p0] + text[p1+len(pt1)+1:]
material = models.Material.objects.get(pk=mid)
material.text = text
material.save()
print mid
except:
raise
print 'Updating failed at ' + str(mid)
|
<commit_before><commit_msg>Add migrate on shell script<commit_after>
|
from django.db import connection
from vpr_content import models
def removeDuplicatedTitleInMaterial():
cur = connection.cursor()
qr0 = 'select id from vpr_content_material'
qr1 = 'select text from vpr_content_material where id=%d'
qr2 = 'update vpr_content_material set text=\'%s\' where id=%d'
pt0 = '<div class="title">'
pt1 = '</div>'
cur.execute(qr0)
mids = cur.fetchall()
for mid in mids:
try:
mid = mid[0]
cur.execute(qr1 % mid)
text = cur.fetchone()[0]
p0 = text.find(pt0)
p1 = text.find(pt1, p0)
text = text[:p0] + text[p1+len(pt1)+1:]
material = models.Material.objects.get(pk=mid)
material.text = text
material.save()
print mid
except:
raise
print 'Updating failed at ' + str(mid)
|
Add migrate on shell scriptfrom django.db import connection
from vpr_content import models
def removeDuplicatedTitleInMaterial():
cur = connection.cursor()
qr0 = 'select id from vpr_content_material'
qr1 = 'select text from vpr_content_material where id=%d'
qr2 = 'update vpr_content_material set text=\'%s\' where id=%d'
pt0 = '<div class="title">'
pt1 = '</div>'
cur.execute(qr0)
mids = cur.fetchall()
for mid in mids:
try:
mid = mid[0]
cur.execute(qr1 % mid)
text = cur.fetchone()[0]
p0 = text.find(pt0)
p1 = text.find(pt1, p0)
text = text[:p0] + text[p1+len(pt1)+1:]
material = models.Material.objects.get(pk=mid)
material.text = text
material.save()
print mid
except:
raise
print 'Updating failed at ' + str(mid)
|
<commit_before><commit_msg>Add migrate on shell script<commit_after>from django.db import connection
from vpr_content import models
def removeDuplicatedTitleInMaterial():
cur = connection.cursor()
qr0 = 'select id from vpr_content_material'
qr1 = 'select text from vpr_content_material where id=%d'
qr2 = 'update vpr_content_material set text=\'%s\' where id=%d'
pt0 = '<div class="title">'
pt1 = '</div>'
cur.execute(qr0)
mids = cur.fetchall()
for mid in mids:
try:
mid = mid[0]
cur.execute(qr1 % mid)
text = cur.fetchone()[0]
p0 = text.find(pt0)
p1 = text.find(pt1, p0)
text = text[:p0] + text[p1+len(pt1)+1:]
material = models.Material.objects.get(pk=mid)
material.text = text
material.save()
print mid
except:
raise
print 'Updating failed at ' + str(mid)
|
|
d75785472dd79f61067a98597064faa450feb100
|
libnamebench/config_test.py
|
libnamebench/config_test.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
Add some tests for dns config parsing
|
Add some tests for dns config parsing
|
Python
|
apache-2.0
|
mirek2580/namebench
|
Add some tests for dns config parsing
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add some tests for dns config parsing<commit_after>
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
Add some tests for dns config parsing#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add some tests for dns config parsing<commit_after>#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the config module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import unittest
import config
class ConfigTest(unittest.TestCase):
def testParseFullLine(self):
line = '129.250.35.251=NTT (2) # y.ns.gin.ntt.net,39.569,-104.8582 (Englewood/CO/US)'
expected = {'name': 'NTT (2)', 'service': 'NTT', 'ip': '129.250.35.251',
'lon': '-104.8582', 'instance': '2', 'country_code': 'US',
'lat': '39.569'}
self.assertEquals(config._ParseServerLine(line), expected)
def testOpenDNSLine(self):
line = '208.67.220.220=OpenDNS # resolver2.opendns.com'
expected = {'name': 'OpenDNS', 'service': 'OpenDNS', 'ip': '208.67.220.220',
'lon': None, 'instance': None, 'country_code': None,
'lat': None}
self.assertEquals(config._ParseServerLine(line), expected)
def testLineWithNoRegion(self):
line = '4.2.2.2=Level/GTEI-2 (3) # vnsc-bak.sys.gtei.net,38.0,-97.0 (US) '
expected = {'name': 'Level/GTEI-2 (3)', 'service': 'Level/GTEI-2',
'ip': '4.2.2.2', 'lon': '-97.0', 'instance': '3',
'country_code': 'US', 'lat': '38.0'}
self.assertEquals(config._ParseServerLine(line), expected)
if __name__ == '__main__':
unittest.main()
|
|
3a296e94be0f0870241bb451f3c5ad06b81deea2
|
integration-test/1298-missing-road.py
|
integration-test/1298-missing-road.py
|
# Relation: 611 (975266)
#https://www.openstreetmap.org/relation/975266
test.assert_has_feature(
12, 1192, 1539, "roads",
{"kind": "major_road",
"shield_text": "611",
"ref": "PA 611",
"all_shield_texts": ["611"],
})
|
Add regressiong test for missing road
|
Add regressiong test for missing road
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
Add regressiong test for missing road
|
# Relation: 611 (975266)
#https://www.openstreetmap.org/relation/975266
test.assert_has_feature(
12, 1192, 1539, "roads",
{"kind": "major_road",
"shield_text": "611",
"ref": "PA 611",
"all_shield_texts": ["611"],
})
|
<commit_before><commit_msg>Add regressiong test for missing road<commit_after>
|
# Relation: 611 (975266)
#https://www.openstreetmap.org/relation/975266
test.assert_has_feature(
12, 1192, 1539, "roads",
{"kind": "major_road",
"shield_text": "611",
"ref": "PA 611",
"all_shield_texts": ["611"],
})
|
Add regressiong test for missing road# Relation: 611 (975266)
#https://www.openstreetmap.org/relation/975266
test.assert_has_feature(
12, 1192, 1539, "roads",
{"kind": "major_road",
"shield_text": "611",
"ref": "PA 611",
"all_shield_texts": ["611"],
})
|
<commit_before><commit_msg>Add regressiong test for missing road<commit_after># Relation: 611 (975266)
#https://www.openstreetmap.org/relation/975266
test.assert_has_feature(
12, 1192, 1539, "roads",
{"kind": "major_road",
"shield_text": "611",
"ref": "PA 611",
"all_shield_texts": ["611"],
})
|
|
52adf2407684730cfe3b5d4f9cab414c1442c6f1
|
simplejtag_l0.py
|
simplejtag_l0.py
|
import time
import logging
import serial
from SWDAdapterBase import *
CMD_WRITE_TMS = ord('8') # +1
CMD_WRITE_TCK = ord(' ') # +1 aka SWCLK
CMD_READ_TMS = ord('d') # aka SWDIO
RESP_ACK = ord('+')
RESP_NACK = ord('-')
RESP_VAL = ord('0')
class Adapter(SWDAdapterBase):
def __init__(self, options):
SWDAdapterBase.__init__(self)
if not options.port:
raise SWDInitError("Port parameter is required")
self.hwlog = logging.getLogger("hwcomm")
self.port = serial.Serial(port=options.port, baudrate=115200, timeout=0.1)
self.init_adapter()
self.JTAG2SWD()
def init_adapter(self):
pass
def cmd(self, cmd):
self.port.write(bytearray([cmd]))
resp = self.port.read(1)
print "%02x:%s" % (cmd, resp),
return ord(resp)
def readBits(self, num):
"Read 1-8 bits from SWD"
res = 0
mask = 1
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TCK)
v = self.cmd(CMD_READ_TMS)
if v & 1:
res |= mask
mask <<= 1
print
self.hwlog.debug("Read %#02x", res)
return res
def writeBits(self, val, num):
"Write 1-8 bits to SWD"
v = val
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TMS + 1 if val & 1 else CMD_WRITE_TMS)
self.cmd(CMD_WRITE_TCK)
val >>= 1
print
self.hwlog.debug("Wrote %#02x", v)
|
Add adapter plugin for simplejtag level0 protocol.
|
Add adapter plugin for simplejtag level0 protocol.
https://github.com/pfalcon/simplejtag
|
Python
|
bsd-3-clause
|
heartscrytech/PySWD,pfalcon/PySWD,kcuzner/PySWD,heartscrytech/PySWD,pfalcon/PySWD,kcuzner/PySWD,pfalcon/PySWD
|
Add adapter plugin for simplejtag level0 protocol.
https://github.com/pfalcon/simplejtag
|
import time
import logging
import serial
from SWDAdapterBase import *
CMD_WRITE_TMS = ord('8') # +1
CMD_WRITE_TCK = ord(' ') # +1 aka SWCLK
CMD_READ_TMS = ord('d') # aka SWDIO
RESP_ACK = ord('+')
RESP_NACK = ord('-')
RESP_VAL = ord('0')
class Adapter(SWDAdapterBase):
def __init__(self, options):
SWDAdapterBase.__init__(self)
if not options.port:
raise SWDInitError("Port parameter is required")
self.hwlog = logging.getLogger("hwcomm")
self.port = serial.Serial(port=options.port, baudrate=115200, timeout=0.1)
self.init_adapter()
self.JTAG2SWD()
def init_adapter(self):
pass
def cmd(self, cmd):
self.port.write(bytearray([cmd]))
resp = self.port.read(1)
print "%02x:%s" % (cmd, resp),
return ord(resp)
def readBits(self, num):
"Read 1-8 bits from SWD"
res = 0
mask = 1
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TCK)
v = self.cmd(CMD_READ_TMS)
if v & 1:
res |= mask
mask <<= 1
print
self.hwlog.debug("Read %#02x", res)
return res
def writeBits(self, val, num):
"Write 1-8 bits to SWD"
v = val
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TMS + 1 if val & 1 else CMD_WRITE_TMS)
self.cmd(CMD_WRITE_TCK)
val >>= 1
print
self.hwlog.debug("Wrote %#02x", v)
|
<commit_before><commit_msg>Add adapter plugin for simplejtag level0 protocol.
https://github.com/pfalcon/simplejtag<commit_after>
|
import time
import logging
import serial
from SWDAdapterBase import *
CMD_WRITE_TMS = ord('8') # +1
CMD_WRITE_TCK = ord(' ') # +1 aka SWCLK
CMD_READ_TMS = ord('d') # aka SWDIO
RESP_ACK = ord('+')
RESP_NACK = ord('-')
RESP_VAL = ord('0')
class Adapter(SWDAdapterBase):
def __init__(self, options):
SWDAdapterBase.__init__(self)
if not options.port:
raise SWDInitError("Port parameter is required")
self.hwlog = logging.getLogger("hwcomm")
self.port = serial.Serial(port=options.port, baudrate=115200, timeout=0.1)
self.init_adapter()
self.JTAG2SWD()
def init_adapter(self):
pass
def cmd(self, cmd):
self.port.write(bytearray([cmd]))
resp = self.port.read(1)
print "%02x:%s" % (cmd, resp),
return ord(resp)
def readBits(self, num):
"Read 1-8 bits from SWD"
res = 0
mask = 1
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TCK)
v = self.cmd(CMD_READ_TMS)
if v & 1:
res |= mask
mask <<= 1
print
self.hwlog.debug("Read %#02x", res)
return res
def writeBits(self, val, num):
"Write 1-8 bits to SWD"
v = val
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TMS + 1 if val & 1 else CMD_WRITE_TMS)
self.cmd(CMD_WRITE_TCK)
val >>= 1
print
self.hwlog.debug("Wrote %#02x", v)
|
Add adapter plugin for simplejtag level0 protocol.
https://github.com/pfalcon/simplejtagimport time
import logging
import serial
from SWDAdapterBase import *
CMD_WRITE_TMS = ord('8') # +1
CMD_WRITE_TCK = ord(' ') # +1 aka SWCLK
CMD_READ_TMS = ord('d') # aka SWDIO
RESP_ACK = ord('+')
RESP_NACK = ord('-')
RESP_VAL = ord('0')
class Adapter(SWDAdapterBase):
def __init__(self, options):
SWDAdapterBase.__init__(self)
if not options.port:
raise SWDInitError("Port parameter is required")
self.hwlog = logging.getLogger("hwcomm")
self.port = serial.Serial(port=options.port, baudrate=115200, timeout=0.1)
self.init_adapter()
self.JTAG2SWD()
def init_adapter(self):
pass
def cmd(self, cmd):
self.port.write(bytearray([cmd]))
resp = self.port.read(1)
print "%02x:%s" % (cmd, resp),
return ord(resp)
def readBits(self, num):
"Read 1-8 bits from SWD"
res = 0
mask = 1
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TCK)
v = self.cmd(CMD_READ_TMS)
if v & 1:
res |= mask
mask <<= 1
print
self.hwlog.debug("Read %#02x", res)
return res
def writeBits(self, val, num):
"Write 1-8 bits to SWD"
v = val
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TMS + 1 if val & 1 else CMD_WRITE_TMS)
self.cmd(CMD_WRITE_TCK)
val >>= 1
print
self.hwlog.debug("Wrote %#02x", v)
|
<commit_before><commit_msg>Add adapter plugin for simplejtag level0 protocol.
https://github.com/pfalcon/simplejtag<commit_after>import time
import logging
import serial
from SWDAdapterBase import *
CMD_WRITE_TMS = ord('8') # +1
CMD_WRITE_TCK = ord(' ') # +1 aka SWCLK
CMD_READ_TMS = ord('d') # aka SWDIO
RESP_ACK = ord('+')
RESP_NACK = ord('-')
RESP_VAL = ord('0')
class Adapter(SWDAdapterBase):
def __init__(self, options):
SWDAdapterBase.__init__(self)
if not options.port:
raise SWDInitError("Port parameter is required")
self.hwlog = logging.getLogger("hwcomm")
self.port = serial.Serial(port=options.port, baudrate=115200, timeout=0.1)
self.init_adapter()
self.JTAG2SWD()
def init_adapter(self):
pass
def cmd(self, cmd):
self.port.write(bytearray([cmd]))
resp = self.port.read(1)
print "%02x:%s" % (cmd, resp),
return ord(resp)
def readBits(self, num):
"Read 1-8 bits from SWD"
res = 0
mask = 1
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TCK)
v = self.cmd(CMD_READ_TMS)
if v & 1:
res |= mask
mask <<= 1
print
self.hwlog.debug("Read %#02x", res)
return res
def writeBits(self, val, num):
"Write 1-8 bits to SWD"
v = val
for i in xrange(num):
self.cmd(CMD_WRITE_TCK | 1)
self.cmd(CMD_WRITE_TMS + 1 if val & 1 else CMD_WRITE_TMS)
self.cmd(CMD_WRITE_TCK)
val >>= 1
print
self.hwlog.debug("Wrote %#02x", v)
|
|
1167c2789521ceeb2a852a1e20bdce4b40db3b04
|
playserver/tracksocketserver.py
|
playserver/tracksocketserver.py
|
import autobahn.asyncio.websocket
class TrackSocket(autobah.asyncio.websocket.WebSocketServerProtocol):
@asyncio.coroutine
def onOpen(self):
#TODO: Make this add to the client list
print("opened")
def closed(self, wasClean, code, reason):
#TODO: make this remove from the client list
print("closed")
|
Add basic TrackSocket using autobahn
|
Add basic TrackSocket using autobahn
|
Python
|
mit
|
ollien/playserver,ollien/playserver,ollien/playserver
|
Add basic TrackSocket using autobahn
|
import autobahn.asyncio.websocket
class TrackSocket(autobah.asyncio.websocket.WebSocketServerProtocol):
@asyncio.coroutine
def onOpen(self):
#TODO: Make this add to the client list
print("opened")
def closed(self, wasClean, code, reason):
#TODO: make this remove from the client list
print("closed")
|
<commit_before><commit_msg>Add basic TrackSocket using autobahn<commit_after>
|
import autobahn.asyncio.websocket
class TrackSocket(autobah.asyncio.websocket.WebSocketServerProtocol):
@asyncio.coroutine
def onOpen(self):
#TODO: Make this add to the client list
print("opened")
def closed(self, wasClean, code, reason):
#TODO: make this remove from the client list
print("closed")
|
Add basic TrackSocket using autobahnimport autobahn.asyncio.websocket
class TrackSocket(autobah.asyncio.websocket.WebSocketServerProtocol):
@asyncio.coroutine
def onOpen(self):
#TODO: Make this add to the client list
print("opened")
def closed(self, wasClean, code, reason):
#TODO: make this remove from the client list
print("closed")
|
<commit_before><commit_msg>Add basic TrackSocket using autobahn<commit_after>import autobahn.asyncio.websocket
class TrackSocket(autobah.asyncio.websocket.WebSocketServerProtocol):
@asyncio.coroutine
def onOpen(self):
#TODO: Make this add to the client list
print("opened")
def closed(self, wasClean, code, reason):
#TODO: make this remove from the client list
print("closed")
|
|
c657836fa7b7e9d248cb35f67384a6da87409380
|
mapit/management/commands/mapit_UK_fix_2014-05.py
|
mapit/management/commands/mapit_UK_fix_2014-05.py
|
# This script is to be run as a one-off to fix up some geometries in the May
# 2014 edition of boundary line that are invalid.
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.contrib.gis.gdal import *
from mapit.models import Area, CodeType
from ..command_utils import fix_invalid_geos_geometry
class Command(NoArgsCommand):
help = 'Fix the UK boundary line import for May 2014'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
code_version = CodeType.objects.get(code='gss')
# Get the polygons that we want to fix
# The areas with bad polygons are:
# E05009392 - LBW Colville
# E05009400 - LBW Pembridge
# W04000985 - CPC Pen Tranch Community
# W04000980 - CPC Abersychan Community
# W05000992 - UTE Abersychan
# W05000999 - UTE Snatchwood
areas_to_fix = (
'E05009392',
'E05009400',
'W04000985',
'W04000980',
'W05000992',
'W05000999',
)
for ons_code in areas_to_fix:
area = Area.objects.get(codes__code=ons_code, codes__type=code_version)
assert area.polygons.count() == 1
area_polygon = area.polygons.first()
fixed_polygon = fix_invalid_geos_geometry(area_polygon.polygon)
if fixed_polygon:
print "Fixed polygon {0}".format(area_polygon)
area_polygon.polygon = fixed_polygon
if options['commit']:
area_polygon.save()
else:
print "Could not fix polygon {0}".format(area_polygon)
|
Add a command to fix invalid geometries in 2014-05 boundary line
|
Add a command to fix invalid geometries in 2014-05 boundary line
Some of the geometries the Ordnance Survey provided were invalid,
having duplicated start and end points. This command fixes them up.
|
Python
|
agpl-3.0
|
chris48s/mapit,Code4SA/mapit,opencorato/mapit,opencorato/mapit,opencorato/mapit,chris48s/mapit,Code4SA/mapit,Sinar/mapit,Code4SA/mapit,Sinar/mapit,chris48s/mapit
|
Add a command to fix invalid geometries in 2014-05 boundary line
Some of the geometries the Ordnance Survey provided were invalid,
having duplicated start and end points. This command fixes them up.
|
# This script is to be run as a one-off to fix up some geometries in the May
# 2014 edition of boundary line that are invalid.
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.contrib.gis.gdal import *
from mapit.models import Area, CodeType
from ..command_utils import fix_invalid_geos_geometry
class Command(NoArgsCommand):
help = 'Fix the UK boundary line import for May 2014'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
code_version = CodeType.objects.get(code='gss')
# Get the polygons that we want to fix
# The areas with bad polygons are:
# E05009392 - LBW Colville
# E05009400 - LBW Pembridge
# W04000985 - CPC Pen Tranch Community
# W04000980 - CPC Abersychan Community
# W05000992 - UTE Abersychan
# W05000999 - UTE Snatchwood
areas_to_fix = (
'E05009392',
'E05009400',
'W04000985',
'W04000980',
'W05000992',
'W05000999',
)
for ons_code in areas_to_fix:
area = Area.objects.get(codes__code=ons_code, codes__type=code_version)
assert area.polygons.count() == 1
area_polygon = area.polygons.first()
fixed_polygon = fix_invalid_geos_geometry(area_polygon.polygon)
if fixed_polygon:
print "Fixed polygon {0}".format(area_polygon)
area_polygon.polygon = fixed_polygon
if options['commit']:
area_polygon.save()
else:
print "Could not fix polygon {0}".format(area_polygon)
|
<commit_before><commit_msg>Add a command to fix invalid geometries in 2014-05 boundary line
Some of the geometries the Ordnance Survey provided were invalid,
having duplicated start and end points. This command fixes them up.<commit_after>
|
# This script is to be run as a one-off to fix up some geometries in the May
# 2014 edition of boundary line that are invalid.
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.contrib.gis.gdal import *
from mapit.models import Area, CodeType
from ..command_utils import fix_invalid_geos_geometry
class Command(NoArgsCommand):
help = 'Fix the UK boundary line import for May 2014'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
code_version = CodeType.objects.get(code='gss')
# Get the polygons that we want to fix
# The areas with bad polygons are:
# E05009392 - LBW Colville
# E05009400 - LBW Pembridge
# W04000985 - CPC Pen Tranch Community
# W04000980 - CPC Abersychan Community
# W05000992 - UTE Abersychan
# W05000999 - UTE Snatchwood
areas_to_fix = (
'E05009392',
'E05009400',
'W04000985',
'W04000980',
'W05000992',
'W05000999',
)
for ons_code in areas_to_fix:
area = Area.objects.get(codes__code=ons_code, codes__type=code_version)
assert area.polygons.count() == 1
area_polygon = area.polygons.first()
fixed_polygon = fix_invalid_geos_geometry(area_polygon.polygon)
if fixed_polygon:
print "Fixed polygon {0}".format(area_polygon)
area_polygon.polygon = fixed_polygon
if options['commit']:
area_polygon.save()
else:
print "Could not fix polygon {0}".format(area_polygon)
|
Add a command to fix invalid geometries in 2014-05 boundary line
Some of the geometries the Ordnance Survey provided were invalid,
having duplicated start and end points. This command fixes them up.# This script is to be run as a one-off to fix up some geometries in the May
# 2014 edition of boundary line that are invalid.
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.contrib.gis.gdal import *
from mapit.models import Area, CodeType
from ..command_utils import fix_invalid_geos_geometry
class Command(NoArgsCommand):
help = 'Fix the UK boundary line import for May 2014'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
code_version = CodeType.objects.get(code='gss')
# Get the polygons that we want to fix
# The areas with bad polygons are:
# E05009392 - LBW Colville
# E05009400 - LBW Pembridge
# W04000985 - CPC Pen Tranch Community
# W04000980 - CPC Abersychan Community
# W05000992 - UTE Abersychan
# W05000999 - UTE Snatchwood
areas_to_fix = (
'E05009392',
'E05009400',
'W04000985',
'W04000980',
'W05000992',
'W05000999',
)
for ons_code in areas_to_fix:
area = Area.objects.get(codes__code=ons_code, codes__type=code_version)
assert area.polygons.count() == 1
area_polygon = area.polygons.first()
fixed_polygon = fix_invalid_geos_geometry(area_polygon.polygon)
if fixed_polygon:
print "Fixed polygon {0}".format(area_polygon)
area_polygon.polygon = fixed_polygon
if options['commit']:
area_polygon.save()
else:
print "Could not fix polygon {0}".format(area_polygon)
|
<commit_before><commit_msg>Add a command to fix invalid geometries in 2014-05 boundary line
Some of the geometries the Ordnance Survey provided were invalid,
having duplicated start and end points. This command fixes them up.<commit_after># This script is to be run as a one-off to fix up some geometries in the May
# 2014 edition of boundary line that are invalid.
from optparse import make_option
from django.core.management.base import NoArgsCommand
from django.contrib.gis.gdal import *
from mapit.models import Area, CodeType
from ..command_utils import fix_invalid_geos_geometry
class Command(NoArgsCommand):
help = 'Fix the UK boundary line import for May 2014'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
code_version = CodeType.objects.get(code='gss')
# Get the polygons that we want to fix
# The areas with bad polygons are:
# E05009392 - LBW Colville
# E05009400 - LBW Pembridge
# W04000985 - CPC Pen Tranch Community
# W04000980 - CPC Abersychan Community
# W05000992 - UTE Abersychan
# W05000999 - UTE Snatchwood
areas_to_fix = (
'E05009392',
'E05009400',
'W04000985',
'W04000980',
'W05000992',
'W05000999',
)
for ons_code in areas_to_fix:
area = Area.objects.get(codes__code=ons_code, codes__type=code_version)
assert area.polygons.count() == 1
area_polygon = area.polygons.first()
fixed_polygon = fix_invalid_geos_geometry(area_polygon.polygon)
if fixed_polygon:
print "Fixed polygon {0}".format(area_polygon)
area_polygon.polygon = fixed_polygon
if options['commit']:
area_polygon.save()
else:
print "Could not fix polygon {0}".format(area_polygon)
|
|
9161b64bb5625c11a1786826c02b33c28e9cc169
|
bongo/apps/archive/management/commands/flushimported.py
|
bongo/apps/archive/management/commands/flushimported.py
|
from django.core.management.base import BaseCommand
from django.db import models as registered_models
class Command(BaseCommand):
def handle(self, *args, **options):
for model in registered_models.get_models(include_auto_created=True):
if model._meta.app_label == "bongo":
for obj in model.objects.all():
if obj.imported:
obj.delete()
|
Add management command to delete all objects that were imported from archive
|
Add management command to delete all objects that were imported from archive
|
Python
|
mit
|
BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo
|
Add management command to delete all objects that were imported from archive
|
from django.core.management.base import BaseCommand
from django.db import models as registered_models
class Command(BaseCommand):
def handle(self, *args, **options):
for model in registered_models.get_models(include_auto_created=True):
if model._meta.app_label == "bongo":
for obj in model.objects.all():
if obj.imported:
obj.delete()
|
<commit_before><commit_msg>Add management command to delete all objects that were imported from archive<commit_after>
|
from django.core.management.base import BaseCommand
from django.db import models as registered_models
class Command(BaseCommand):
def handle(self, *args, **options):
for model in registered_models.get_models(include_auto_created=True):
if model._meta.app_label == "bongo":
for obj in model.objects.all():
if obj.imported:
obj.delete()
|
Add management command to delete all objects that were imported from archivefrom django.core.management.base import BaseCommand
from django.db import models as registered_models
class Command(BaseCommand):
def handle(self, *args, **options):
for model in registered_models.get_models(include_auto_created=True):
if model._meta.app_label == "bongo":
for obj in model.objects.all():
if obj.imported:
obj.delete()
|
<commit_before><commit_msg>Add management command to delete all objects that were imported from archive<commit_after>from django.core.management.base import BaseCommand
from django.db import models as registered_models
class Command(BaseCommand):
def handle(self, *args, **options):
for model in registered_models.get_models(include_auto_created=True):
if model._meta.app_label == "bongo":
for obj in model.objects.all():
if obj.imported:
obj.delete()
|
|
7e69b7ca81a25d56764fc8a9ddd45d023ce23550
|
kinect2_calibration/scripts/convert_calib_pose_to_urdf_format.py
|
kinect2_calibration/scripts/convert_calib_pose_to_urdf_format.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import numpy as np
import os
import tempfile
import tf
import yaml
def read_calib_pose(fname):
tmp = tempfile.TemporaryFile()
# we need modify original yaml file because yaml.load(fname) simply will fail
with open(fname, "r") as f:
reader = f.readlines()
for row in reader:
if row[0] == "%":
# remove first line: "%YAML:1.0"
continue
if row.find("!!") != -1:
# remove "!!opencv-matrix"
row = row[:row.find("!!")] + os.linesep
tmp.write(row)
tmp.seek(0)
data = yaml.load(tmp)
return data
def calc_xyz_rpy(data):
mat = np.resize(data["rotation"]["data"], (3, 3))
xyz = data["translation"]["data"]
rpy = tf.transformations.euler_from_matrix(mat)
return xyz, rpy
def print_urdf(xyz, rpy):
print("""
<joint name=\"kinect2_rgb_joint\" type=\"fixed\">
<origin xyz=\"{x} {y} {z}\" rpy=\"{roll} {pitch} {yaw}\"/>
<parent link=\"kinect2_rgb_optical_frame\"/>
<child link=\"kinect2_ir_optical_frame\"/>
</joint>
""".format(x=xyz[0], y=xyz[1], z=xyz[2],
roll=rpy[0], pitch=rpy[1], yaw=rpy[2]))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='calculate transform from kinect2_rgb_optical_frame to kinect2_ir_optical_frame')
parser.add_argument('-f', type=str, help='path to calib_pose.yaml', metavar='file', required=True)
args = parser.parse_args()
data = read_calib_pose(args.f)
xyz, rpy = calc_xyz_rpy(data)
print_urdf(xyz, rpy)
|
Add a convert script from calib_pose.yaml to urdf format
|
Add a convert script from calib_pose.yaml to urdf format
|
Python
|
apache-2.0
|
code-iai/iai_kinect2,code-iai/iai_kinect2,code-iai/iai_kinect2
|
Add a convert script from calib_pose.yaml to urdf format
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import numpy as np
import os
import tempfile
import tf
import yaml
def read_calib_pose(fname):
tmp = tempfile.TemporaryFile()
# we need modify original yaml file because yaml.load(fname) simply will fail
with open(fname, "r") as f:
reader = f.readlines()
for row in reader:
if row[0] == "%":
# remove first line: "%YAML:1.0"
continue
if row.find("!!") != -1:
# remove "!!opencv-matrix"
row = row[:row.find("!!")] + os.linesep
tmp.write(row)
tmp.seek(0)
data = yaml.load(tmp)
return data
def calc_xyz_rpy(data):
mat = np.resize(data["rotation"]["data"], (3, 3))
xyz = data["translation"]["data"]
rpy = tf.transformations.euler_from_matrix(mat)
return xyz, rpy
def print_urdf(xyz, rpy):
print("""
<joint name=\"kinect2_rgb_joint\" type=\"fixed\">
<origin xyz=\"{x} {y} {z}\" rpy=\"{roll} {pitch} {yaw}\"/>
<parent link=\"kinect2_rgb_optical_frame\"/>
<child link=\"kinect2_ir_optical_frame\"/>
</joint>
""".format(x=xyz[0], y=xyz[1], z=xyz[2],
roll=rpy[0], pitch=rpy[1], yaw=rpy[2]))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='calculate transform from kinect2_rgb_optical_frame to kinect2_ir_optical_frame')
parser.add_argument('-f', type=str, help='path to calib_pose.yaml', metavar='file', required=True)
args = parser.parse_args()
data = read_calib_pose(args.f)
xyz, rpy = calc_xyz_rpy(data)
print_urdf(xyz, rpy)
|
<commit_before><commit_msg>Add a convert script from calib_pose.yaml to urdf format<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import numpy as np
import os
import tempfile
import tf
import yaml
def read_calib_pose(fname):
tmp = tempfile.TemporaryFile()
# we need modify original yaml file because yaml.load(fname) simply will fail
with open(fname, "r") as f:
reader = f.readlines()
for row in reader:
if row[0] == "%":
# remove first line: "%YAML:1.0"
continue
if row.find("!!") != -1:
# remove "!!opencv-matrix"
row = row[:row.find("!!")] + os.linesep
tmp.write(row)
tmp.seek(0)
data = yaml.load(tmp)
return data
def calc_xyz_rpy(data):
mat = np.resize(data["rotation"]["data"], (3, 3))
xyz = data["translation"]["data"]
rpy = tf.transformations.euler_from_matrix(mat)
return xyz, rpy
def print_urdf(xyz, rpy):
print("""
<joint name=\"kinect2_rgb_joint\" type=\"fixed\">
<origin xyz=\"{x} {y} {z}\" rpy=\"{roll} {pitch} {yaw}\"/>
<parent link=\"kinect2_rgb_optical_frame\"/>
<child link=\"kinect2_ir_optical_frame\"/>
</joint>
""".format(x=xyz[0], y=xyz[1], z=xyz[2],
roll=rpy[0], pitch=rpy[1], yaw=rpy[2]))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='calculate transform from kinect2_rgb_optical_frame to kinect2_ir_optical_frame')
parser.add_argument('-f', type=str, help='path to calib_pose.yaml', metavar='file', required=True)
args = parser.parse_args()
data = read_calib_pose(args.f)
xyz, rpy = calc_xyz_rpy(data)
print_urdf(xyz, rpy)
|
Add a convert script from calib_pose.yaml to urdf format#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import numpy as np
import os
import tempfile
import tf
import yaml
def read_calib_pose(fname):
tmp = tempfile.TemporaryFile()
# we need modify original yaml file because yaml.load(fname) simply will fail
with open(fname, "r") as f:
reader = f.readlines()
for row in reader:
if row[0] == "%":
# remove first line: "%YAML:1.0"
continue
if row.find("!!") != -1:
# remove "!!opencv-matrix"
row = row[:row.find("!!")] + os.linesep
tmp.write(row)
tmp.seek(0)
data = yaml.load(tmp)
return data
def calc_xyz_rpy(data):
mat = np.resize(data["rotation"]["data"], (3, 3))
xyz = data["translation"]["data"]
rpy = tf.transformations.euler_from_matrix(mat)
return xyz, rpy
def print_urdf(xyz, rpy):
print("""
<joint name=\"kinect2_rgb_joint\" type=\"fixed\">
<origin xyz=\"{x} {y} {z}\" rpy=\"{roll} {pitch} {yaw}\"/>
<parent link=\"kinect2_rgb_optical_frame\"/>
<child link=\"kinect2_ir_optical_frame\"/>
</joint>
""".format(x=xyz[0], y=xyz[1], z=xyz[2],
roll=rpy[0], pitch=rpy[1], yaw=rpy[2]))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='calculate transform from kinect2_rgb_optical_frame to kinect2_ir_optical_frame')
parser.add_argument('-f', type=str, help='path to calib_pose.yaml', metavar='file', required=True)
args = parser.parse_args()
data = read_calib_pose(args.f)
xyz, rpy = calc_xyz_rpy(data)
print_urdf(xyz, rpy)
|
<commit_before><commit_msg>Add a convert script from calib_pose.yaml to urdf format<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import numpy as np
import os
import tempfile
import tf
import yaml
def read_calib_pose(fname):
tmp = tempfile.TemporaryFile()
# we need modify original yaml file because yaml.load(fname) simply will fail
with open(fname, "r") as f:
reader = f.readlines()
for row in reader:
if row[0] == "%":
# remove first line: "%YAML:1.0"
continue
if row.find("!!") != -1:
# remove "!!opencv-matrix"
row = row[:row.find("!!")] + os.linesep
tmp.write(row)
tmp.seek(0)
data = yaml.load(tmp)
return data
def calc_xyz_rpy(data):
mat = np.resize(data["rotation"]["data"], (3, 3))
xyz = data["translation"]["data"]
rpy = tf.transformations.euler_from_matrix(mat)
return xyz, rpy
def print_urdf(xyz, rpy):
print("""
<joint name=\"kinect2_rgb_joint\" type=\"fixed\">
<origin xyz=\"{x} {y} {z}\" rpy=\"{roll} {pitch} {yaw}\"/>
<parent link=\"kinect2_rgb_optical_frame\"/>
<child link=\"kinect2_ir_optical_frame\"/>
</joint>
""".format(x=xyz[0], y=xyz[1], z=xyz[2],
roll=rpy[0], pitch=rpy[1], yaw=rpy[2]))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='calculate transform from kinect2_rgb_optical_frame to kinect2_ir_optical_frame')
parser.add_argument('-f', type=str, help='path to calib_pose.yaml', metavar='file', required=True)
args = parser.parse_args()
data = read_calib_pose(args.f)
xyz, rpy = calc_xyz_rpy(data)
print_urdf(xyz, rpy)
|
|
4f754ad3177eb0fcdc10ccf7804349a9453e9ff0
|
asyncio/__init__.py
|
asyncio/__init__.py
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
__all__ += windows_events.__all__
else:
from .unix_events import * # pragma: no cover
__all__ += unix_events.__all__
|
Fix asyncio.__all__: export also unix_events and windows_events symbols
|
Fix asyncio.__all__: export also unix_events and windows_events symbols
For example, on Windows, it was not possible to get ProactorEventLoop or
DefaultEventLoopPolicy using "from asyncio import *".
|
Python
|
apache-2.0
|
vxgmichel/asyncio,gvanrossum/asyncio,manipopopo/asyncio,jashandeep-sohi/asyncio,Martiusweb/asyncio,ajdavis/asyncio,Martiusweb/asyncio,jashandeep-sohi/asyncio,1st1/asyncio,haypo/trollius,fallen/asyncio,vxgmichel/asyncio,gvanrossum/asyncio,Martiusweb/asyncio,jashandeep-sohi/asyncio,fallen/asyncio,haypo/trollius,haypo/trollius,ajdavis/asyncio,manipopopo/asyncio,fallen/asyncio,vxgmichel/asyncio,gsb-eng/asyncio,gsb-eng/asyncio,manipopopo/asyncio,gvanrossum/asyncio,ajdavis/asyncio,1st1/asyncio,gsb-eng/asyncio,1st1/asyncio
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
Fix asyncio.__all__: export also unix_events and windows_events symbols
For example, on Windows, it was not possible to get ProactorEventLoop or
DefaultEventLoopPolicy using "from asyncio import *".
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
__all__ += windows_events.__all__
else:
from .unix_events import * # pragma: no cover
__all__ += unix_events.__all__
|
<commit_before>"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
<commit_msg>Fix asyncio.__all__: export also unix_events and windows_events symbols
For example, on Windows, it was not possible to get ProactorEventLoop or
DefaultEventLoopPolicy using "from asyncio import *".<commit_after>
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
__all__ += windows_events.__all__
else:
from .unix_events import * # pragma: no cover
__all__ += unix_events.__all__
|
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
Fix asyncio.__all__: export also unix_events and windows_events symbols
For example, on Windows, it was not possible to get ProactorEventLoop or
DefaultEventLoopPolicy using "from asyncio import *"."""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
__all__ += windows_events.__all__
else:
from .unix_events import * # pragma: no cover
__all__ += unix_events.__all__
|
<commit_before>"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
else:
from .unix_events import * # pragma: no cover
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
<commit_msg>Fix asyncio.__all__: export also unix_events and windows_events symbols
For example, on Windows, it was not possible to get ProactorEventLoop or
DefaultEventLoopPolicy using "from asyncio import *".<commit_after>"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
__all__ = (coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
__all__ += windows_events.__all__
else:
from .unix_events import * # pragma: no cover
__all__ += unix_events.__all__
|
d71f62599effde01c1bc33abbd4a034b5d1a87d0
|
caltech/show-nll.py
|
caltech/show-nll.py
|
#!/usr/bin/env python
from __future__ import division
import sys
import logging
from time import time
import cPickle as pickle
import numpy as np
import h5py
import pylab
#import theano
#import theano.tensor as T
_logger = logging.getLogger()
#=============================================================================
if __name__ == "__main__":
import argparse
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action="store_true", default=False)
parser.add_argument('out_dir', nargs='+')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
FORMAT = '[%(asctime)s] %(message)s'
DATEFMT = "%H:%M:%S"
logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=level)
#pylab.figsize(12, 8)
pylab.xlabel("Epochs")
pylab.ylabel("Est. LL")
pylab.ylim([-200, -100])
for out_dir in args.out_dir:
fname = out_dir+"/results.h5"
try:
with h5py.File(fname, "r") as h5:
logger.debug("Keys:")
for k, v in h5.iteritems():
logger.debug(" %-30s %s" % (k, v.shape))
LL100 = h5['learning.monitor.100.LL'][:]
pylab.plot(LL100[::2], label=out_dir[-20:])
print "==== %s ====" % out_dir
if 'learning.monitor.10.LL' in h5:
LL10 = h5['learning.monitor.10.LL'][:]
print "Final LL [ 10 samples]: %.2f" % LL10[-1]
print "Final LL [100 samples]: %.2f" % LL100[-1]
if 'learning.monitor.500.LL' in h5:
LL500 = h5['learning.monitor.500.LL'][:]
print "Final LL [500 samples]: %.2f" % LL500[-1]
except KeyError, e:
logger.info("Failed to read data from %s" % fname)
except IOError, e:
logger.info("Failed to open %s fname: %s" % (fname, e))
pylab.legend(loc="lower right")
pylab.show()
|
Add script to plot ~LL's
|
Add script to plot ~LL's
|
Python
|
agpl-3.0
|
codeaudit/reweighted-ws,codeaudit/reweighted-ws,lenovor/reweighted-ws,jbornschein/reweighted-ws,yanweifu/reweighted-ws,jbornschein/y2k,lenovor/reweighted-ws,skaasj/reweighted-ws,jbornschein/y2k,jbornschein/reweighted-ws,skaasj/reweighted-ws,yanweifu/reweighted-ws
|
Add script to plot ~LL's
|
#!/usr/bin/env python
from __future__ import division
import sys
import logging
from time import time
import cPickle as pickle
import numpy as np
import h5py
import pylab
#import theano
#import theano.tensor as T
_logger = logging.getLogger()
#=============================================================================
if __name__ == "__main__":
import argparse
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action="store_true", default=False)
parser.add_argument('out_dir', nargs='+')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
FORMAT = '[%(asctime)s] %(message)s'
DATEFMT = "%H:%M:%S"
logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=level)
#pylab.figsize(12, 8)
pylab.xlabel("Epochs")
pylab.ylabel("Est. LL")
pylab.ylim([-200, -100])
for out_dir in args.out_dir:
fname = out_dir+"/results.h5"
try:
with h5py.File(fname, "r") as h5:
logger.debug("Keys:")
for k, v in h5.iteritems():
logger.debug(" %-30s %s" % (k, v.shape))
LL100 = h5['learning.monitor.100.LL'][:]
pylab.plot(LL100[::2], label=out_dir[-20:])
print "==== %s ====" % out_dir
if 'learning.monitor.10.LL' in h5:
LL10 = h5['learning.monitor.10.LL'][:]
print "Final LL [ 10 samples]: %.2f" % LL10[-1]
print "Final LL [100 samples]: %.2f" % LL100[-1]
if 'learning.monitor.500.LL' in h5:
LL500 = h5['learning.monitor.500.LL'][:]
print "Final LL [500 samples]: %.2f" % LL500[-1]
except KeyError, e:
logger.info("Failed to read data from %s" % fname)
except IOError, e:
logger.info("Failed to open %s fname: %s" % (fname, e))
pylab.legend(loc="lower right")
pylab.show()
|
<commit_before><commit_msg>Add script to plot ~LL's<commit_after>
|
#!/usr/bin/env python
from __future__ import division
import sys
import logging
from time import time
import cPickle as pickle
import numpy as np
import h5py
import pylab
#import theano
#import theano.tensor as T
_logger = logging.getLogger()
#=============================================================================
if __name__ == "__main__":
import argparse
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action="store_true", default=False)
parser.add_argument('out_dir', nargs='+')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
FORMAT = '[%(asctime)s] %(message)s'
DATEFMT = "%H:%M:%S"
logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=level)
#pylab.figsize(12, 8)
pylab.xlabel("Epochs")
pylab.ylabel("Est. LL")
pylab.ylim([-200, -100])
for out_dir in args.out_dir:
fname = out_dir+"/results.h5"
try:
with h5py.File(fname, "r") as h5:
logger.debug("Keys:")
for k, v in h5.iteritems():
logger.debug(" %-30s %s" % (k, v.shape))
LL100 = h5['learning.monitor.100.LL'][:]
pylab.plot(LL100[::2], label=out_dir[-20:])
print "==== %s ====" % out_dir
if 'learning.monitor.10.LL' in h5:
LL10 = h5['learning.monitor.10.LL'][:]
print "Final LL [ 10 samples]: %.2f" % LL10[-1]
print "Final LL [100 samples]: %.2f" % LL100[-1]
if 'learning.monitor.500.LL' in h5:
LL500 = h5['learning.monitor.500.LL'][:]
print "Final LL [500 samples]: %.2f" % LL500[-1]
except KeyError, e:
logger.info("Failed to read data from %s" % fname)
except IOError, e:
logger.info("Failed to open %s fname: %s" % (fname, e))
pylab.legend(loc="lower right")
pylab.show()
|
Add script to plot ~LL's#!/usr/bin/env python
from __future__ import division
import sys
import logging
from time import time
import cPickle as pickle
import numpy as np
import h5py
import pylab
#import theano
#import theano.tensor as T
_logger = logging.getLogger()
#=============================================================================
if __name__ == "__main__":
import argparse
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action="store_true", default=False)
parser.add_argument('out_dir', nargs='+')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
FORMAT = '[%(asctime)s] %(message)s'
DATEFMT = "%H:%M:%S"
logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=level)
#pylab.figsize(12, 8)
pylab.xlabel("Epochs")
pylab.ylabel("Est. LL")
pylab.ylim([-200, -100])
for out_dir in args.out_dir:
fname = out_dir+"/results.h5"
try:
with h5py.File(fname, "r") as h5:
logger.debug("Keys:")
for k, v in h5.iteritems():
logger.debug(" %-30s %s" % (k, v.shape))
LL100 = h5['learning.monitor.100.LL'][:]
pylab.plot(LL100[::2], label=out_dir[-20:])
print "==== %s ====" % out_dir
if 'learning.monitor.10.LL' in h5:
LL10 = h5['learning.monitor.10.LL'][:]
print "Final LL [ 10 samples]: %.2f" % LL10[-1]
print "Final LL [100 samples]: %.2f" % LL100[-1]
if 'learning.monitor.500.LL' in h5:
LL500 = h5['learning.monitor.500.LL'][:]
print "Final LL [500 samples]: %.2f" % LL500[-1]
except KeyError, e:
logger.info("Failed to read data from %s" % fname)
except IOError, e:
logger.info("Failed to open %s fname: %s" % (fname, e))
pylab.legend(loc="lower right")
pylab.show()
|
<commit_before><commit_msg>Add script to plot ~LL's<commit_after>#!/usr/bin/env python
from __future__ import division
import sys
import logging
from time import time
import cPickle as pickle
import numpy as np
import h5py
import pylab
#import theano
#import theano.tensor as T
_logger = logging.getLogger()
#=============================================================================
if __name__ == "__main__":
import argparse
logger = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action="store_true", default=False)
parser.add_argument('out_dir', nargs='+')
args = parser.parse_args()
if args.verbose:
level = logging.DEBUG
else:
level = logging.INFO
FORMAT = '[%(asctime)s] %(message)s'
DATEFMT = "%H:%M:%S"
logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=level)
#pylab.figsize(12, 8)
pylab.xlabel("Epochs")
pylab.ylabel("Est. LL")
pylab.ylim([-200, -100])
for out_dir in args.out_dir:
fname = out_dir+"/results.h5"
try:
with h5py.File(fname, "r") as h5:
logger.debug("Keys:")
for k, v in h5.iteritems():
logger.debug(" %-30s %s" % (k, v.shape))
LL100 = h5['learning.monitor.100.LL'][:]
pylab.plot(LL100[::2], label=out_dir[-20:])
print "==== %s ====" % out_dir
if 'learning.monitor.10.LL' in h5:
LL10 = h5['learning.monitor.10.LL'][:]
print "Final LL [ 10 samples]: %.2f" % LL10[-1]
print "Final LL [100 samples]: %.2f" % LL100[-1]
if 'learning.monitor.500.LL' in h5:
LL500 = h5['learning.monitor.500.LL'][:]
print "Final LL [500 samples]: %.2f" % LL500[-1]
except KeyError, e:
logger.info("Failed to read data from %s" % fname)
except IOError, e:
logger.info("Failed to open %s fname: %s" % (fname, e))
pylab.legend(loc="lower right")
pylab.show()
|
|
02bbf70940dead81017a741048bfff28a0a488c4
|
model/slide_carousel.py
|
model/slide_carousel.py
|
import json
from numpy.random import choice
def generate_slides():
slides = json.load(open("static/carousel/slides.json", "r"))
weights = [x["weight"] for x in slides]
sum_weights = sum(weights)
weights = [w / float(sum_weights) for w in weights]
num_slides = 5
slides = choice(slides, 5, replace=False, p=weights)
return slides
|
Add slide carousel helper function model.
|
Add slide carousel helper function model.
|
Python
|
apache-2.0
|
JeffreyBLewis/WebVoteView,JeffreyBLewis/WebVoteView,JeffreyBLewis/WebVoteView,JeffreyBLewis/WebVoteView
|
Add slide carousel helper function model.
|
import json
from numpy.random import choice
def generate_slides():
slides = json.load(open("static/carousel/slides.json", "r"))
weights = [x["weight"] for x in slides]
sum_weights = sum(weights)
weights = [w / float(sum_weights) for w in weights]
num_slides = 5
slides = choice(slides, 5, replace=False, p=weights)
return slides
|
<commit_before><commit_msg>Add slide carousel helper function model.<commit_after>
|
import json
from numpy.random import choice
def generate_slides():
slides = json.load(open("static/carousel/slides.json", "r"))
weights = [x["weight"] for x in slides]
sum_weights = sum(weights)
weights = [w / float(sum_weights) for w in weights]
num_slides = 5
slides = choice(slides, 5, replace=False, p=weights)
return slides
|
Add slide carousel helper function model.import json
from numpy.random import choice
def generate_slides():
slides = json.load(open("static/carousel/slides.json", "r"))
weights = [x["weight"] for x in slides]
sum_weights = sum(weights)
weights = [w / float(sum_weights) for w in weights]
num_slides = 5
slides = choice(slides, 5, replace=False, p=weights)
return slides
|
<commit_before><commit_msg>Add slide carousel helper function model.<commit_after>import json
from numpy.random import choice
def generate_slides():
slides = json.load(open("static/carousel/slides.json", "r"))
weights = [x["weight"] for x in slides]
sum_weights = sum(weights)
weights = [w / float(sum_weights) for w in weights]
num_slides = 5
slides = choice(slides, 5, replace=False, p=weights)
return slides
|
|
a7b30e1804414f3460a1006a7b92f3d34d0f8090
|
bvg-grabber.py
|
bvg-grabber.py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import sys
from bvggrabber.api.actualdeparture import ActualDepartureQueryApi
from bvggrabber.api.scheduleddeparture import ScheduledDepartureQueryApi, Vehicle
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Query the BVG-website for departures')
parser.add_argument('station', type=str, help='The station to query')
parser.add_argument('file', type=str, help='Path to file')
parser.add_argument('--vehicle', type=str, nargs='*',
help='''Vehicles which shall be queried,
if non given actualdepartue (bus)
will be used''')
parser.add_argument('--limit', type=int, help='Max departures to query')
args = parser.parse_args()
query = None
res = None
if args.vehicle:
vehicles = 0
bus = False
for vehicle in args.vehicle:
if vehicle == 'S':
vehicles |= Vehicle.S
elif vehicle == 'U':
vehicles |= Vehicle.U
elif vehicle == 'TRAM':
vehicles |= Vehicle.TRAM
elif vehicle == 'BUS':
bus = True
elif vehicle == 'FERRY':
vehicles |= Vehicle.FERRY
elif vehicle == 'RB':
vehicles |= Vehicle.RB
elif vehicle == 'IC':
vehicles |= Vehicle.IC
limit = 9
if args.limit:
limit = args.limit
if bus:
aquery = ActualDepartureQueryApi(args.station)
query = ScheduledDepartureQueryApi(args.station, vehicles, limit=limit)
res = query.call()
res2 = aquery.call()
res.merge(res2)
else:
query = ActualDepartureQueryApi(args.station)
res = query.call()
if args.file == 'stdout':
print(res.to_json, file=sys.stdout)
else:
file = open(args.file, 'w')
print(res.to_json, file=file)
file.close()
|
Add a CLI script to fetch departures
|
Add a CLI script to fetch departures
|
Python
|
bsd-3-clause
|
MarkusH/bvg-grabber
|
Add a CLI script to fetch departures
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import sys
from bvggrabber.api.actualdeparture import ActualDepartureQueryApi
from bvggrabber.api.scheduleddeparture import ScheduledDepartureQueryApi, Vehicle
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Query the BVG-website for departures')
parser.add_argument('station', type=str, help='The station to query')
parser.add_argument('file', type=str, help='Path to file')
parser.add_argument('--vehicle', type=str, nargs='*',
help='''Vehicles which shall be queried,
if non given actualdepartue (bus)
will be used''')
parser.add_argument('--limit', type=int, help='Max departures to query')
args = parser.parse_args()
query = None
res = None
if args.vehicle:
vehicles = 0
bus = False
for vehicle in args.vehicle:
if vehicle == 'S':
vehicles |= Vehicle.S
elif vehicle == 'U':
vehicles |= Vehicle.U
elif vehicle == 'TRAM':
vehicles |= Vehicle.TRAM
elif vehicle == 'BUS':
bus = True
elif vehicle == 'FERRY':
vehicles |= Vehicle.FERRY
elif vehicle == 'RB':
vehicles |= Vehicle.RB
elif vehicle == 'IC':
vehicles |= Vehicle.IC
limit = 9
if args.limit:
limit = args.limit
if bus:
aquery = ActualDepartureQueryApi(args.station)
query = ScheduledDepartureQueryApi(args.station, vehicles, limit=limit)
res = query.call()
res2 = aquery.call()
res.merge(res2)
else:
query = ActualDepartureQueryApi(args.station)
res = query.call()
if args.file == 'stdout':
print(res.to_json, file=sys.stdout)
else:
file = open(args.file, 'w')
print(res.to_json, file=file)
file.close()
|
<commit_before><commit_msg>Add a CLI script to fetch departures<commit_after>
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import sys
from bvggrabber.api.actualdeparture import ActualDepartureQueryApi
from bvggrabber.api.scheduleddeparture import ScheduledDepartureQueryApi, Vehicle
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Query the BVG-website for departures')
parser.add_argument('station', type=str, help='The station to query')
parser.add_argument('file', type=str, help='Path to file')
parser.add_argument('--vehicle', type=str, nargs='*',
help='''Vehicles which shall be queried,
if non given actualdepartue (bus)
will be used''')
parser.add_argument('--limit', type=int, help='Max departures to query')
args = parser.parse_args()
query = None
res = None
if args.vehicle:
vehicles = 0
bus = False
for vehicle in args.vehicle:
if vehicle == 'S':
vehicles |= Vehicle.S
elif vehicle == 'U':
vehicles |= Vehicle.U
elif vehicle == 'TRAM':
vehicles |= Vehicle.TRAM
elif vehicle == 'BUS':
bus = True
elif vehicle == 'FERRY':
vehicles |= Vehicle.FERRY
elif vehicle == 'RB':
vehicles |= Vehicle.RB
elif vehicle == 'IC':
vehicles |= Vehicle.IC
limit = 9
if args.limit:
limit = args.limit
if bus:
aquery = ActualDepartureQueryApi(args.station)
query = ScheduledDepartureQueryApi(args.station, vehicles, limit=limit)
res = query.call()
res2 = aquery.call()
res.merge(res2)
else:
query = ActualDepartureQueryApi(args.station)
res = query.call()
if args.file == 'stdout':
print(res.to_json, file=sys.stdout)
else:
file = open(args.file, 'w')
print(res.to_json, file=file)
file.close()
|
Add a CLI script to fetch departures#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import sys
from bvggrabber.api.actualdeparture import ActualDepartureQueryApi
from bvggrabber.api.scheduleddeparture import ScheduledDepartureQueryApi, Vehicle
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Query the BVG-website for departures')
parser.add_argument('station', type=str, help='The station to query')
parser.add_argument('file', type=str, help='Path to file')
parser.add_argument('--vehicle', type=str, nargs='*',
help='''Vehicles which shall be queried,
if non given actualdepartue (bus)
will be used''')
parser.add_argument('--limit', type=int, help='Max departures to query')
args = parser.parse_args()
query = None
res = None
if args.vehicle:
vehicles = 0
bus = False
for vehicle in args.vehicle:
if vehicle == 'S':
vehicles |= Vehicle.S
elif vehicle == 'U':
vehicles |= Vehicle.U
elif vehicle == 'TRAM':
vehicles |= Vehicle.TRAM
elif vehicle == 'BUS':
bus = True
elif vehicle == 'FERRY':
vehicles |= Vehicle.FERRY
elif vehicle == 'RB':
vehicles |= Vehicle.RB
elif vehicle == 'IC':
vehicles |= Vehicle.IC
limit = 9
if args.limit:
limit = args.limit
if bus:
aquery = ActualDepartureQueryApi(args.station)
query = ScheduledDepartureQueryApi(args.station, vehicles, limit=limit)
res = query.call()
res2 = aquery.call()
res.merge(res2)
else:
query = ActualDepartureQueryApi(args.station)
res = query.call()
if args.file == 'stdout':
print(res.to_json, file=sys.stdout)
else:
file = open(args.file, 'w')
print(res.to_json, file=file)
file.close()
|
<commit_before><commit_msg>Add a CLI script to fetch departures<commit_after>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import sys
from bvggrabber.api.actualdeparture import ActualDepartureQueryApi
from bvggrabber.api.scheduleddeparture import ScheduledDepartureQueryApi, Vehicle
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Query the BVG-website for departures')
parser.add_argument('station', type=str, help='The station to query')
parser.add_argument('file', type=str, help='Path to file')
parser.add_argument('--vehicle', type=str, nargs='*',
help='''Vehicles which shall be queried,
if non given actualdepartue (bus)
will be used''')
parser.add_argument('--limit', type=int, help='Max departures to query')
args = parser.parse_args()
query = None
res = None
if args.vehicle:
vehicles = 0
bus = False
for vehicle in args.vehicle:
if vehicle == 'S':
vehicles |= Vehicle.S
elif vehicle == 'U':
vehicles |= Vehicle.U
elif vehicle == 'TRAM':
vehicles |= Vehicle.TRAM
elif vehicle == 'BUS':
bus = True
elif vehicle == 'FERRY':
vehicles |= Vehicle.FERRY
elif vehicle == 'RB':
vehicles |= Vehicle.RB
elif vehicle == 'IC':
vehicles |= Vehicle.IC
limit = 9
if args.limit:
limit = args.limit
if bus:
aquery = ActualDepartureQueryApi(args.station)
query = ScheduledDepartureQueryApi(args.station, vehicles, limit=limit)
res = query.call()
res2 = aquery.call()
res.merge(res2)
else:
query = ActualDepartureQueryApi(args.station)
res = query.call()
if args.file == 'stdout':
print(res.to_json, file=sys.stdout)
else:
file = open(args.file, 'w')
print(res.to_json, file=file)
file.close()
|
|
8780cd754652b2f4b3e16b7eafcf28ec5fb5d63b
|
cogs/command_log.py
|
cogs/command_log.py
|
import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
|
Add a simple command-logging cog
|
Add a simple command-logging cog
|
Python
|
mit
|
Thessia/Liara
|
Add a simple command-logging cog
|
import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
|
<commit_before><commit_msg>Add a simple command-logging cog<commit_after>
|
import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
|
Add a simple command-logging cogimport logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
|
<commit_before><commit_msg>Add a simple command-logging cog<commit_after>import logging
class CommandLog:
"""A simple cog to log commands executed."""
def __init__(self):
self.log = logging.getLogger('liara.command_log')
async def on_command(self, ctx):
self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" in {0.guild}'.format(ctx))
def setup(liara):
liara.add_cog(CommandLog())
|
|
881086ca70eea95ef712aba26a77113de2a2f6a4
|
python/pygtk/python_gtk3_pygobject/search_entry.py
|
python/pygtk/python_gtk3_pygobject/search_entry.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
A simple Python GTK+3 SearchEntry snippet.
API: http://lazka.github.io/pgi-docs/Gtk-3.0/classes/SearchEntry.html
"""
from gi.repository import Gtk as gtk
def print_text(widget, data):
"""
Print the content of the Entry widget.
This is an usage example fo gtk.Entry.get_text().
"""
entry = data # data is a gtk.Entry widget
print(entry.get_text())
def clear_text(widget, data):
"""
Clear the content of the Entry widget.
This is an usage example fo gtk.Entry.set_text().
"""
entry = data # data is a gtk.Entry widget
entry.set_text("")
def main():
window = gtk.Window()
window.set_border_width(10)
# Label and Entry #################
# Label
label = gtk.Label(label="Text to print:")
label.set_alignment(0, 0.5) # Align left
# Entry
entry = gtk.SearchEntry()
# Buttons #########################
# Print button
button1 = gtk.Button(label="Print")
button1.connect("clicked", print_text, entry) # connect("event", callback, data)
# Clean button
button2 = gtk.Button(label="Clear")
button2.connect("clicked", clear_text, entry) # connect("event", callback, data)
# Box containers ##################
horizontal_box1 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box1.pack_start(label, expand=True, fill=True, padding=0)
horizontal_box1.pack_start(entry, expand=True, fill=True, padding=0)
horizontal_box2 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box2.pack_start(button1, expand=True, fill=True, padding=0)
horizontal_box2.pack_start(button2, expand=True, fill=True, padding=0)
vertical_box = gtk.Box(orientation = gtk.Orientation.VERTICAL, spacing=6) # 6 pixels are placed between children
vertical_box.pack_start(horizontal_box1, expand=True, fill=True, padding=0)
vertical_box.pack_start(horizontal_box2, expand=True, fill=True, padding=0)
window.add(vertical_box)
###
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
Add a widget (Python GTK+3).
|
Add a widget (Python GTK+3).
|
Python
|
mit
|
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
|
Add a widget (Python GTK+3).
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
A simple Python GTK+3 SearchEntry snippet.
API: http://lazka.github.io/pgi-docs/Gtk-3.0/classes/SearchEntry.html
"""
from gi.repository import Gtk as gtk
def print_text(widget, data):
"""
Print the content of the Entry widget.
This is an usage example fo gtk.Entry.get_text().
"""
entry = data # data is a gtk.Entry widget
print(entry.get_text())
def clear_text(widget, data):
"""
Clear the content of the Entry widget.
This is an usage example fo gtk.Entry.set_text().
"""
entry = data # data is a gtk.Entry widget
entry.set_text("")
def main():
window = gtk.Window()
window.set_border_width(10)
# Label and Entry #################
# Label
label = gtk.Label(label="Text to print:")
label.set_alignment(0, 0.5) # Align left
# Entry
entry = gtk.SearchEntry()
# Buttons #########################
# Print button
button1 = gtk.Button(label="Print")
button1.connect("clicked", print_text, entry) # connect("event", callback, data)
# Clean button
button2 = gtk.Button(label="Clear")
button2.connect("clicked", clear_text, entry) # connect("event", callback, data)
# Box containers ##################
horizontal_box1 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box1.pack_start(label, expand=True, fill=True, padding=0)
horizontal_box1.pack_start(entry, expand=True, fill=True, padding=0)
horizontal_box2 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box2.pack_start(button1, expand=True, fill=True, padding=0)
horizontal_box2.pack_start(button2, expand=True, fill=True, padding=0)
vertical_box = gtk.Box(orientation = gtk.Orientation.VERTICAL, spacing=6) # 6 pixels are placed between children
vertical_box.pack_start(horizontal_box1, expand=True, fill=True, padding=0)
vertical_box.pack_start(horizontal_box2, expand=True, fill=True, padding=0)
window.add(vertical_box)
###
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a widget (Python GTK+3).<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
A simple Python GTK+3 SearchEntry snippet.
API: http://lazka.github.io/pgi-docs/Gtk-3.0/classes/SearchEntry.html
"""
from gi.repository import Gtk as gtk
def print_text(widget, data):
"""
Print the content of the Entry widget.
This is an usage example fo gtk.Entry.get_text().
"""
entry = data # data is a gtk.Entry widget
print(entry.get_text())
def clear_text(widget, data):
"""
Clear the content of the Entry widget.
This is an usage example fo gtk.Entry.set_text().
"""
entry = data # data is a gtk.Entry widget
entry.set_text("")
def main():
window = gtk.Window()
window.set_border_width(10)
# Label and Entry #################
# Label
label = gtk.Label(label="Text to print:")
label.set_alignment(0, 0.5) # Align left
# Entry
entry = gtk.SearchEntry()
# Buttons #########################
# Print button
button1 = gtk.Button(label="Print")
button1.connect("clicked", print_text, entry) # connect("event", callback, data)
# Clean button
button2 = gtk.Button(label="Clear")
button2.connect("clicked", clear_text, entry) # connect("event", callback, data)
# Box containers ##################
horizontal_box1 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box1.pack_start(label, expand=True, fill=True, padding=0)
horizontal_box1.pack_start(entry, expand=True, fill=True, padding=0)
horizontal_box2 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box2.pack_start(button1, expand=True, fill=True, padding=0)
horizontal_box2.pack_start(button2, expand=True, fill=True, padding=0)
vertical_box = gtk.Box(orientation = gtk.Orientation.VERTICAL, spacing=6) # 6 pixels are placed between children
vertical_box.pack_start(horizontal_box1, expand=True, fill=True, padding=0)
vertical_box.pack_start(horizontal_box2, expand=True, fill=True, padding=0)
window.add(vertical_box)
###
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
Add a widget (Python GTK+3).#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
A simple Python GTK+3 SearchEntry snippet.
API: http://lazka.github.io/pgi-docs/Gtk-3.0/classes/SearchEntry.html
"""
from gi.repository import Gtk as gtk
def print_text(widget, data):
"""
Print the content of the Entry widget.
This is an usage example fo gtk.Entry.get_text().
"""
entry = data # data is a gtk.Entry widget
print(entry.get_text())
def clear_text(widget, data):
"""
Clear the content of the Entry widget.
This is an usage example fo gtk.Entry.set_text().
"""
entry = data # data is a gtk.Entry widget
entry.set_text("")
def main():
window = gtk.Window()
window.set_border_width(10)
# Label and Entry #################
# Label
label = gtk.Label(label="Text to print:")
label.set_alignment(0, 0.5) # Align left
# Entry
entry = gtk.SearchEntry()
# Buttons #########################
# Print button
button1 = gtk.Button(label="Print")
button1.connect("clicked", print_text, entry) # connect("event", callback, data)
# Clean button
button2 = gtk.Button(label="Clear")
button2.connect("clicked", clear_text, entry) # connect("event", callback, data)
# Box containers ##################
horizontal_box1 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box1.pack_start(label, expand=True, fill=True, padding=0)
horizontal_box1.pack_start(entry, expand=True, fill=True, padding=0)
horizontal_box2 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box2.pack_start(button1, expand=True, fill=True, padding=0)
horizontal_box2.pack_start(button2, expand=True, fill=True, padding=0)
vertical_box = gtk.Box(orientation = gtk.Orientation.VERTICAL, spacing=6) # 6 pixels are placed between children
vertical_box.pack_start(horizontal_box1, expand=True, fill=True, padding=0)
vertical_box.pack_start(horizontal_box2, expand=True, fill=True, padding=0)
window.add(vertical_box)
###
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a widget (Python GTK+3).<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
A simple Python GTK+3 SearchEntry snippet.
API: http://lazka.github.io/pgi-docs/Gtk-3.0/classes/SearchEntry.html
"""
from gi.repository import Gtk as gtk
def print_text(widget, data):
"""
Print the content of the Entry widget.
This is an usage example fo gtk.Entry.get_text().
"""
entry = data # data is a gtk.Entry widget
print(entry.get_text())
def clear_text(widget, data):
"""
Clear the content of the Entry widget.
This is an usage example fo gtk.Entry.set_text().
"""
entry = data # data is a gtk.Entry widget
entry.set_text("")
def main():
window = gtk.Window()
window.set_border_width(10)
# Label and Entry #################
# Label
label = gtk.Label(label="Text to print:")
label.set_alignment(0, 0.5) # Align left
# Entry
entry = gtk.SearchEntry()
# Buttons #########################
# Print button
button1 = gtk.Button(label="Print")
button1.connect("clicked", print_text, entry) # connect("event", callback, data)
# Clean button
button2 = gtk.Button(label="Clear")
button2.connect("clicked", clear_text, entry) # connect("event", callback, data)
# Box containers ##################
horizontal_box1 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box1.pack_start(label, expand=True, fill=True, padding=0)
horizontal_box1.pack_start(entry, expand=True, fill=True, padding=0)
horizontal_box2 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing=6) # 6 pixels are placed between children
horizontal_box2.pack_start(button1, expand=True, fill=True, padding=0)
horizontal_box2.pack_start(button2, expand=True, fill=True, padding=0)
vertical_box = gtk.Box(orientation = gtk.Orientation.VERTICAL, spacing=6) # 6 pixels are placed between children
vertical_box.pack_start(horizontal_box1, expand=True, fill=True, padding=0)
vertical_box.pack_start(horizontal_box2, expand=True, fill=True, padding=0)
window.add(vertical_box)
###
window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
window.show_all() # display the window
gtk.main() # GTK+ main loop
if __name__ == '__main__':
main()
|
|
030cd9181965366f2b0427a2bc2d9e569fd3cd17
|
tests/test_data_handler.py
|
tests/test_data_handler.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from utils.data_handler import DataHandler
src_rs = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-10.zip"
src_po = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-13.zip"
def test_find_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_files_rs = dh_rs._get_contents()
html_files_po = dh_po._get_contents()
assert len(html_files_rs) == 16
assert len(html_files_po) == 24
def test_find_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_files_rs = dh_rs._get_contents('json')
json_files_po = dh_po._get_contents('.JSON')
assert len(json_files_rs) == 2
assert len(json_files_po) == 3
def test_get_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_data_rs = dh_rs.get_game_data(dh_rs.game_ids[0])
html_data = open(html_data_rs['ES']).read()
assert "Event Summary" in html_data
assert "Game %s" % dh_rs.game_ids[0][2:] in html_data
dh_rs.clear_temp_files()
html_data_po = dh_po.get_game_data(dh_po.game_ids[-1], 'GS')
html_data = open(html_data_po['GS']).read()
assert "Game Summary" in html_data
assert "Game %s" % dh_po.game_ids[-1][2:] in html_data
dh_po.clear_temp_files()
def test_get_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_data_rs = dh_rs.get_game_json_data(dh_rs.game_ids[-1])
json_data = json.loads(open(json_data_rs).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_rs.game_ids[-1]
dh_rs.clear_temp_files()
json_data_po = dh_po.get_game_json_data(dh_po.game_ids[0])
json_data = json.loads(open(json_data_po).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_po.game_ids[0]
dh_po.clear_temp_files()
|
Add tests for data handler items
|
Add tests for data handler items
|
Python
|
mit
|
leaffan/pynhldb
|
Add tests for data handler items
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from utils.data_handler import DataHandler
src_rs = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-10.zip"
src_po = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-13.zip"
def test_find_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_files_rs = dh_rs._get_contents()
html_files_po = dh_po._get_contents()
assert len(html_files_rs) == 16
assert len(html_files_po) == 24
def test_find_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_files_rs = dh_rs._get_contents('json')
json_files_po = dh_po._get_contents('.JSON')
assert len(json_files_rs) == 2
assert len(json_files_po) == 3
def test_get_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_data_rs = dh_rs.get_game_data(dh_rs.game_ids[0])
html_data = open(html_data_rs['ES']).read()
assert "Event Summary" in html_data
assert "Game %s" % dh_rs.game_ids[0][2:] in html_data
dh_rs.clear_temp_files()
html_data_po = dh_po.get_game_data(dh_po.game_ids[-1], 'GS')
html_data = open(html_data_po['GS']).read()
assert "Game Summary" in html_data
assert "Game %s" % dh_po.game_ids[-1][2:] in html_data
dh_po.clear_temp_files()
def test_get_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_data_rs = dh_rs.get_game_json_data(dh_rs.game_ids[-1])
json_data = json.loads(open(json_data_rs).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_rs.game_ids[-1]
dh_rs.clear_temp_files()
json_data_po = dh_po.get_game_json_data(dh_po.game_ids[0])
json_data = json.loads(open(json_data_po).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_po.game_ids[0]
dh_po.clear_temp_files()
|
<commit_before><commit_msg>Add tests for data handler items<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from utils.data_handler import DataHandler
src_rs = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-10.zip"
src_po = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-13.zip"
def test_find_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_files_rs = dh_rs._get_contents()
html_files_po = dh_po._get_contents()
assert len(html_files_rs) == 16
assert len(html_files_po) == 24
def test_find_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_files_rs = dh_rs._get_contents('json')
json_files_po = dh_po._get_contents('.JSON')
assert len(json_files_rs) == 2
assert len(json_files_po) == 3
def test_get_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_data_rs = dh_rs.get_game_data(dh_rs.game_ids[0])
html_data = open(html_data_rs['ES']).read()
assert "Event Summary" in html_data
assert "Game %s" % dh_rs.game_ids[0][2:] in html_data
dh_rs.clear_temp_files()
html_data_po = dh_po.get_game_data(dh_po.game_ids[-1], 'GS')
html_data = open(html_data_po['GS']).read()
assert "Game Summary" in html_data
assert "Game %s" % dh_po.game_ids[-1][2:] in html_data
dh_po.clear_temp_files()
def test_get_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_data_rs = dh_rs.get_game_json_data(dh_rs.game_ids[-1])
json_data = json.loads(open(json_data_rs).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_rs.game_ids[-1]
dh_rs.clear_temp_files()
json_data_po = dh_po.get_game_json_data(dh_po.game_ids[0])
json_data = json.loads(open(json_data_po).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_po.game_ids[0]
dh_po.clear_temp_files()
|
Add tests for data handler items#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from utils.data_handler import DataHandler
src_rs = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-10.zip"
src_po = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-13.zip"
def test_find_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_files_rs = dh_rs._get_contents()
html_files_po = dh_po._get_contents()
assert len(html_files_rs) == 16
assert len(html_files_po) == 24
def test_find_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_files_rs = dh_rs._get_contents('json')
json_files_po = dh_po._get_contents('.JSON')
assert len(json_files_rs) == 2
assert len(json_files_po) == 3
def test_get_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_data_rs = dh_rs.get_game_data(dh_rs.game_ids[0])
html_data = open(html_data_rs['ES']).read()
assert "Event Summary" in html_data
assert "Game %s" % dh_rs.game_ids[0][2:] in html_data
dh_rs.clear_temp_files()
html_data_po = dh_po.get_game_data(dh_po.game_ids[-1], 'GS')
html_data = open(html_data_po['GS']).read()
assert "Game Summary" in html_data
assert "Game %s" % dh_po.game_ids[-1][2:] in html_data
dh_po.clear_temp_files()
def test_get_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_data_rs = dh_rs.get_game_json_data(dh_rs.game_ids[-1])
json_data = json.loads(open(json_data_rs).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_rs.game_ids[-1]
dh_rs.clear_temp_files()
json_data_po = dh_po.get_game_json_data(dh_po.game_ids[0])
json_data = json.loads(open(json_data_po).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_po.game_ids[0]
dh_po.clear_temp_files()
|
<commit_before><commit_msg>Add tests for data handler items<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from utils.data_handler import DataHandler
src_rs = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-10.zip"
src_po = r"D:\nhl\official_and_json\_2015-16\2016-04\2016-04-13.zip"
def test_find_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_files_rs = dh_rs._get_contents()
html_files_po = dh_po._get_contents()
assert len(html_files_rs) == 16
assert len(html_files_po) == 24
def test_find_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_files_rs = dh_rs._get_contents('json')
json_files_po = dh_po._get_contents('.JSON')
assert len(json_files_rs) == 2
assert len(json_files_po) == 3
def test_get_html_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
html_data_rs = dh_rs.get_game_data(dh_rs.game_ids[0])
html_data = open(html_data_rs['ES']).read()
assert "Event Summary" in html_data
assert "Game %s" % dh_rs.game_ids[0][2:] in html_data
dh_rs.clear_temp_files()
html_data_po = dh_po.get_game_data(dh_po.game_ids[-1], 'GS')
html_data = open(html_data_po['GS']).read()
assert "Game Summary" in html_data
assert "Game %s" % dh_po.game_ids[-1][2:] in html_data
dh_po.clear_temp_files()
def test_get_json_data():
dh_rs = DataHandler(src_rs)
dh_po = DataHandler(src_po)
json_data_rs = dh_rs.get_game_json_data(dh_rs.game_ids[-1])
json_data = json.loads(open(json_data_rs).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_rs.game_ids[-1]
dh_rs.clear_temp_files()
json_data_po = dh_po.get_game_json_data(dh_po.game_ids[0])
json_data = json.loads(open(json_data_po).read())
assert str(json_data['gameData']['game']['pk'])[4:] == dh_po.game_ids[0]
dh_po.clear_temp_files()
|
|
3041ee3026b79b7b840d597ba67ebe6b5020fa7a
|
pyisemail/test/test_email_validator.py
|
pyisemail/test/test_email_validator.py
|
import unittest
from pyisemail import EmailValidator
class EmailValidatorTest(unittest.TestCase):
def test_abstract_is_email(self):
v = EmailValidator()
self.assertRaises(NotImplementedError, v.is_email, "test@example.com")
|
Make sure EmailValidator.is_email raises NotImplementedError.
|
Make sure EmailValidator.is_email raises NotImplementedError.
|
Python
|
mit
|
michaelherold/pyIsEmail,michaelherold/pyIsEmail
|
Make sure EmailValidator.is_email raises NotImplementedError.
|
import unittest
from pyisemail import EmailValidator
class EmailValidatorTest(unittest.TestCase):
def test_abstract_is_email(self):
v = EmailValidator()
self.assertRaises(NotImplementedError, v.is_email, "test@example.com")
|
<commit_before><commit_msg>Make sure EmailValidator.is_email raises NotImplementedError.<commit_after>
|
import unittest
from pyisemail import EmailValidator
class EmailValidatorTest(unittest.TestCase):
def test_abstract_is_email(self):
v = EmailValidator()
self.assertRaises(NotImplementedError, v.is_email, "test@example.com")
|
Make sure EmailValidator.is_email raises NotImplementedError.import unittest
from pyisemail import EmailValidator
class EmailValidatorTest(unittest.TestCase):
def test_abstract_is_email(self):
v = EmailValidator()
self.assertRaises(NotImplementedError, v.is_email, "test@example.com")
|
<commit_before><commit_msg>Make sure EmailValidator.is_email raises NotImplementedError.<commit_after>import unittest
from pyisemail import EmailValidator
class EmailValidatorTest(unittest.TestCase):
def test_abstract_is_email(self):
v = EmailValidator()
self.assertRaises(NotImplementedError, v.is_email, "test@example.com")
|
|
b7de7d6c5ec8509331be6cfb63fc0f591c256412
|
tests/test_symbolic_data.py
|
tests/test_symbolic_data.py
|
from devito import DenseData, TimeData
from sympy import symbols, Derivative, as_finite_diff
import pytest
import numpy as np
@pytest.fixture
def shape(xdim=20, ydim=30):
return (xdim, ydim)
@pytest.mark.parametrize('SymbolType, dimension', [
(DenseData, 'x'), (DenseData, 'y'),
(TimeData, 'x'), (TimeData, 'y'), (TimeData, 't'),
])
def test_stencil_derivative(shape, SymbolType, dimension):
"""Test symbolic behaviour when expanding stencil derivatives"""
x, h = symbols('%s h' % dimension)
u = SymbolType(name='u', shape=shape)
u.data[:] = 66.6
dx = u.diff(x)
dxx = u.diff(x, x)
# Check for sympy Derivative objects
assert(isinstance(dx, Derivative) and isinstance(dxx, Derivative))
s_dx = as_finite_diff(dx, [x - h, x])
s_dxx = as_finite_diff(dxx, [x - h, x, x + h])
# Check stencil length of first and second derivatives
assert(len(s_dx.args) == 2 and len(s_dxx.args) == 3)
u_dx = s_dx.args[0].args[1]
u_dxx = s_dx.args[0].args[1]
# Ensure that devito meta-data survived symbolic transformation
assert(u_dx.shape[-2:] == shape and u_dxx.shape[-2:] == shape)
assert(np.allclose(u_dx.data, 66.6))
assert(np.allclose(u_dxx.data, 66.6))
@pytest.mark.parametrize('SymbolType, derivative, dim', [
(DenseData, 'dx2', 3), (DenseData, 'dy2', 3),
(TimeData, 'dx2', 3), (TimeData, 'dy2', 3), (TimeData, 'dt', 2)
])
def test_preformed_derivatives(shape, SymbolType, derivative, dim):
"""Test the stencil expressions provided by devito objects"""
u = SymbolType(name='u', shape=shape)
expr = getattr(u, derivative)
assert(len(expr.args) == dim)
|
Add tests for symbolic differentiation of data objects
|
SymbolicData: Add tests for symbolic differentiation of data objects
|
Python
|
mit
|
opesci/devito,opesci/devito
|
SymbolicData: Add tests for symbolic differentiation of data objects
|
from devito import DenseData, TimeData
from sympy import symbols, Derivative, as_finite_diff
import pytest
import numpy as np
@pytest.fixture
def shape(xdim=20, ydim=30):
return (xdim, ydim)
@pytest.mark.parametrize('SymbolType, dimension', [
(DenseData, 'x'), (DenseData, 'y'),
(TimeData, 'x'), (TimeData, 'y'), (TimeData, 't'),
])
def test_stencil_derivative(shape, SymbolType, dimension):
"""Test symbolic behaviour when expanding stencil derivatives"""
x, h = symbols('%s h' % dimension)
u = SymbolType(name='u', shape=shape)
u.data[:] = 66.6
dx = u.diff(x)
dxx = u.diff(x, x)
# Check for sympy Derivative objects
assert(isinstance(dx, Derivative) and isinstance(dxx, Derivative))
s_dx = as_finite_diff(dx, [x - h, x])
s_dxx = as_finite_diff(dxx, [x - h, x, x + h])
# Check stencil length of first and second derivatives
assert(len(s_dx.args) == 2 and len(s_dxx.args) == 3)
u_dx = s_dx.args[0].args[1]
u_dxx = s_dx.args[0].args[1]
# Ensure that devito meta-data survived symbolic transformation
assert(u_dx.shape[-2:] == shape and u_dxx.shape[-2:] == shape)
assert(np.allclose(u_dx.data, 66.6))
assert(np.allclose(u_dxx.data, 66.6))
@pytest.mark.parametrize('SymbolType, derivative, dim', [
(DenseData, 'dx2', 3), (DenseData, 'dy2', 3),
(TimeData, 'dx2', 3), (TimeData, 'dy2', 3), (TimeData, 'dt', 2)
])
def test_preformed_derivatives(shape, SymbolType, derivative, dim):
"""Test the stencil expressions provided by devito objects"""
u = SymbolType(name='u', shape=shape)
expr = getattr(u, derivative)
assert(len(expr.args) == dim)
|
<commit_before><commit_msg>SymbolicData: Add tests for symbolic differentiation of data objects<commit_after>
|
from devito import DenseData, TimeData
from sympy import symbols, Derivative, as_finite_diff
import pytest
import numpy as np
@pytest.fixture
def shape(xdim=20, ydim=30):
return (xdim, ydim)
@pytest.mark.parametrize('SymbolType, dimension', [
(DenseData, 'x'), (DenseData, 'y'),
(TimeData, 'x'), (TimeData, 'y'), (TimeData, 't'),
])
def test_stencil_derivative(shape, SymbolType, dimension):
"""Test symbolic behaviour when expanding stencil derivatives"""
x, h = symbols('%s h' % dimension)
u = SymbolType(name='u', shape=shape)
u.data[:] = 66.6
dx = u.diff(x)
dxx = u.diff(x, x)
# Check for sympy Derivative objects
assert(isinstance(dx, Derivative) and isinstance(dxx, Derivative))
s_dx = as_finite_diff(dx, [x - h, x])
s_dxx = as_finite_diff(dxx, [x - h, x, x + h])
# Check stencil length of first and second derivatives
assert(len(s_dx.args) == 2 and len(s_dxx.args) == 3)
u_dx = s_dx.args[0].args[1]
u_dxx = s_dx.args[0].args[1]
# Ensure that devito meta-data survived symbolic transformation
assert(u_dx.shape[-2:] == shape and u_dxx.shape[-2:] == shape)
assert(np.allclose(u_dx.data, 66.6))
assert(np.allclose(u_dxx.data, 66.6))
@pytest.mark.parametrize('SymbolType, derivative, dim', [
(DenseData, 'dx2', 3), (DenseData, 'dy2', 3),
(TimeData, 'dx2', 3), (TimeData, 'dy2', 3), (TimeData, 'dt', 2)
])
def test_preformed_derivatives(shape, SymbolType, derivative, dim):
"""Test the stencil expressions provided by devito objects"""
u = SymbolType(name='u', shape=shape)
expr = getattr(u, derivative)
assert(len(expr.args) == dim)
|
SymbolicData: Add tests for symbolic differentiation of data objectsfrom devito import DenseData, TimeData
from sympy import symbols, Derivative, as_finite_diff
import pytest
import numpy as np
@pytest.fixture
def shape(xdim=20, ydim=30):
return (xdim, ydim)
@pytest.mark.parametrize('SymbolType, dimension', [
(DenseData, 'x'), (DenseData, 'y'),
(TimeData, 'x'), (TimeData, 'y'), (TimeData, 't'),
])
def test_stencil_derivative(shape, SymbolType, dimension):
"""Test symbolic behaviour when expanding stencil derivatives"""
x, h = symbols('%s h' % dimension)
u = SymbolType(name='u', shape=shape)
u.data[:] = 66.6
dx = u.diff(x)
dxx = u.diff(x, x)
# Check for sympy Derivative objects
assert(isinstance(dx, Derivative) and isinstance(dxx, Derivative))
s_dx = as_finite_diff(dx, [x - h, x])
s_dxx = as_finite_diff(dxx, [x - h, x, x + h])
# Check stencil length of first and second derivatives
assert(len(s_dx.args) == 2 and len(s_dxx.args) == 3)
u_dx = s_dx.args[0].args[1]
u_dxx = s_dx.args[0].args[1]
# Ensure that devito meta-data survived symbolic transformation
assert(u_dx.shape[-2:] == shape and u_dxx.shape[-2:] == shape)
assert(np.allclose(u_dx.data, 66.6))
assert(np.allclose(u_dxx.data, 66.6))
@pytest.mark.parametrize('SymbolType, derivative, dim', [
(DenseData, 'dx2', 3), (DenseData, 'dy2', 3),
(TimeData, 'dx2', 3), (TimeData, 'dy2', 3), (TimeData, 'dt', 2)
])
def test_preformed_derivatives(shape, SymbolType, derivative, dim):
"""Test the stencil expressions provided by devito objects"""
u = SymbolType(name='u', shape=shape)
expr = getattr(u, derivative)
assert(len(expr.args) == dim)
|
<commit_before><commit_msg>SymbolicData: Add tests for symbolic differentiation of data objects<commit_after>from devito import DenseData, TimeData
from sympy import symbols, Derivative, as_finite_diff
import pytest
import numpy as np
@pytest.fixture
def shape(xdim=20, ydim=30):
return (xdim, ydim)
@pytest.mark.parametrize('SymbolType, dimension', [
(DenseData, 'x'), (DenseData, 'y'),
(TimeData, 'x'), (TimeData, 'y'), (TimeData, 't'),
])
def test_stencil_derivative(shape, SymbolType, dimension):
"""Test symbolic behaviour when expanding stencil derivatives"""
x, h = symbols('%s h' % dimension)
u = SymbolType(name='u', shape=shape)
u.data[:] = 66.6
dx = u.diff(x)
dxx = u.diff(x, x)
# Check for sympy Derivative objects
assert(isinstance(dx, Derivative) and isinstance(dxx, Derivative))
s_dx = as_finite_diff(dx, [x - h, x])
s_dxx = as_finite_diff(dxx, [x - h, x, x + h])
# Check stencil length of first and second derivatives
assert(len(s_dx.args) == 2 and len(s_dxx.args) == 3)
u_dx = s_dx.args[0].args[1]
u_dxx = s_dx.args[0].args[1]
# Ensure that devito meta-data survived symbolic transformation
assert(u_dx.shape[-2:] == shape and u_dxx.shape[-2:] == shape)
assert(np.allclose(u_dx.data, 66.6))
assert(np.allclose(u_dxx.data, 66.6))
@pytest.mark.parametrize('SymbolType, derivative, dim', [
(DenseData, 'dx2', 3), (DenseData, 'dy2', 3),
(TimeData, 'dx2', 3), (TimeData, 'dy2', 3), (TimeData, 'dt', 2)
])
def test_preformed_derivatives(shape, SymbolType, derivative, dim):
"""Test the stencil expressions provided by devito objects"""
u = SymbolType(name='u', shape=shape)
expr = getattr(u, derivative)
assert(len(expr.args) == dim)
|
|
4911788e869510c76cd5d4a4ea593fc0769d41d2
|
scripts/release/update_rnacentral_description.py
|
scripts/release/update_rnacentral_description.py
|
"""
Update all RNAcentral descriptions to the latest version using RNAcentral API.
Usage:
python update_rnacentral_descriptions.py
"""
import requests
from utils import RfamDB
def update_description(cursor, cnx, rfamseq_acc, description):
"""
Update rfamseq description for a sequence ID.
"""
sql = """UPDATE rfamseq
SET description=%s
WHERE
rfamseq_acc=%s
"""
cursor.execute(sql, (description, rfamseq_acc))
cnx.commit()
def get_rnacentral_ids(cursor):
"""
Get a list of RNAcentral IDs from the rfamseq table.
"""
data = []
sql = """SELECT rfamseq_acc
FROM rfamseq
WHERE
rfamseq_acc LIKE 'URS00%'"""
cursor.execute(sql)
for result in cursor.fetchall():
data.append(result)
print 'Found {} RNAcentral IDs'.format(len(data))
return data
def update_descriptions(cursor, cnx):
"""
Update all RNAcentral descriptions to the latest version.
"""
found = 0
not_found = 0
for entry in get_rnacentral_ids(cursor):
url = 'http://www.ebi.ac.uk/ebisearch/ws/rest/rnacentral?query={} AND entry_type:sequence&fields=description&format=json'
rnacentral_id = entry[0]
data = requests.get(url.format(rnacentral_id))
if data.json()['hitCount'] == 1:
description = data.json()['entries'][0]['fields']['description'][0]
print('{}: {}'.format(rnacentral_id, description))
update_description(cursor, cnx, rnacentral_id, description)
found += 1
else:
print('No description found for {}'.format(rnacentral_id))
not_found += 1
print('Updated {} descriptions, not found {} descriptions'.format(found, not_found))
def main():
"""
Main entry point.
"""
cnx = RfamDB.connect()
cursor = cnx.cursor(buffered=True)
update_descriptions(cursor, cnx)
cursor.close()
RfamDB.disconnect(cnx)
if __name__ == '__main__':
main()
|
Add a script to update RNAcentral descriptions
|
Add a script to update RNAcentral descriptions
|
Python
|
apache-2.0
|
Rfam/rfam-production,Rfam/rfam-production,Rfam/rfam-production
|
Add a script to update RNAcentral descriptions
|
"""
Update all RNAcentral descriptions to the latest version using RNAcentral API.
Usage:
python update_rnacentral_descriptions.py
"""
import requests
from utils import RfamDB
def update_description(cursor, cnx, rfamseq_acc, description):
"""
Update rfamseq description for a sequence ID.
"""
sql = """UPDATE rfamseq
SET description=%s
WHERE
rfamseq_acc=%s
"""
cursor.execute(sql, (description, rfamseq_acc))
cnx.commit()
def get_rnacentral_ids(cursor):
"""
Get a list of RNAcentral IDs from the rfamseq table.
"""
data = []
sql = """SELECT rfamseq_acc
FROM rfamseq
WHERE
rfamseq_acc LIKE 'URS00%'"""
cursor.execute(sql)
for result in cursor.fetchall():
data.append(result)
print 'Found {} RNAcentral IDs'.format(len(data))
return data
def update_descriptions(cursor, cnx):
"""
Update all RNAcentral descriptions to the latest version.
"""
found = 0
not_found = 0
for entry in get_rnacentral_ids(cursor):
url = 'http://www.ebi.ac.uk/ebisearch/ws/rest/rnacentral?query={} AND entry_type:sequence&fields=description&format=json'
rnacentral_id = entry[0]
data = requests.get(url.format(rnacentral_id))
if data.json()['hitCount'] == 1:
description = data.json()['entries'][0]['fields']['description'][0]
print('{}: {}'.format(rnacentral_id, description))
update_description(cursor, cnx, rnacentral_id, description)
found += 1
else:
print('No description found for {}'.format(rnacentral_id))
not_found += 1
print('Updated {} descriptions, not found {} descriptions'.format(found, not_found))
def main():
"""
Main entry point.
"""
cnx = RfamDB.connect()
cursor = cnx.cursor(buffered=True)
update_descriptions(cursor, cnx)
cursor.close()
RfamDB.disconnect(cnx)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to update RNAcentral descriptions<commit_after>
|
"""
Update all RNAcentral descriptions to the latest version using RNAcentral API.
Usage:
python update_rnacentral_descriptions.py
"""
import requests
from utils import RfamDB
def update_description(cursor, cnx, rfamseq_acc, description):
"""
Update rfamseq description for a sequence ID.
"""
sql = """UPDATE rfamseq
SET description=%s
WHERE
rfamseq_acc=%s
"""
cursor.execute(sql, (description, rfamseq_acc))
cnx.commit()
def get_rnacentral_ids(cursor):
"""
Get a list of RNAcentral IDs from the rfamseq table.
"""
data = []
sql = """SELECT rfamseq_acc
FROM rfamseq
WHERE
rfamseq_acc LIKE 'URS00%'"""
cursor.execute(sql)
for result in cursor.fetchall():
data.append(result)
print 'Found {} RNAcentral IDs'.format(len(data))
return data
def update_descriptions(cursor, cnx):
"""
Update all RNAcentral descriptions to the latest version.
"""
found = 0
not_found = 0
for entry in get_rnacentral_ids(cursor):
url = 'http://www.ebi.ac.uk/ebisearch/ws/rest/rnacentral?query={} AND entry_type:sequence&fields=description&format=json'
rnacentral_id = entry[0]
data = requests.get(url.format(rnacentral_id))
if data.json()['hitCount'] == 1:
description = data.json()['entries'][0]['fields']['description'][0]
print('{}: {}'.format(rnacentral_id, description))
update_description(cursor, cnx, rnacentral_id, description)
found += 1
else:
print('No description found for {}'.format(rnacentral_id))
not_found += 1
print('Updated {} descriptions, not found {} descriptions'.format(found, not_found))
def main():
"""
Main entry point.
"""
cnx = RfamDB.connect()
cursor = cnx.cursor(buffered=True)
update_descriptions(cursor, cnx)
cursor.close()
RfamDB.disconnect(cnx)
if __name__ == '__main__':
main()
|
Add a script to update RNAcentral descriptions"""
Update all RNAcentral descriptions to the latest version using RNAcentral API.
Usage:
python update_rnacentral_descriptions.py
"""
import requests
from utils import RfamDB
def update_description(cursor, cnx, rfamseq_acc, description):
"""
Update rfamseq description for a sequence ID.
"""
sql = """UPDATE rfamseq
SET description=%s
WHERE
rfamseq_acc=%s
"""
cursor.execute(sql, (description, rfamseq_acc))
cnx.commit()
def get_rnacentral_ids(cursor):
"""
Get a list of RNAcentral IDs from the rfamseq table.
"""
data = []
sql = """SELECT rfamseq_acc
FROM rfamseq
WHERE
rfamseq_acc LIKE 'URS00%'"""
cursor.execute(sql)
for result in cursor.fetchall():
data.append(result)
print 'Found {} RNAcentral IDs'.format(len(data))
return data
def update_descriptions(cursor, cnx):
"""
Update all RNAcentral descriptions to the latest version.
"""
found = 0
not_found = 0
for entry in get_rnacentral_ids(cursor):
url = 'http://www.ebi.ac.uk/ebisearch/ws/rest/rnacentral?query={} AND entry_type:sequence&fields=description&format=json'
rnacentral_id = entry[0]
data = requests.get(url.format(rnacentral_id))
if data.json()['hitCount'] == 1:
description = data.json()['entries'][0]['fields']['description'][0]
print('{}: {}'.format(rnacentral_id, description))
update_description(cursor, cnx, rnacentral_id, description)
found += 1
else:
print('No description found for {}'.format(rnacentral_id))
not_found += 1
print('Updated {} descriptions, not found {} descriptions'.format(found, not_found))
def main():
"""
Main entry point.
"""
cnx = RfamDB.connect()
cursor = cnx.cursor(buffered=True)
update_descriptions(cursor, cnx)
cursor.close()
RfamDB.disconnect(cnx)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to update RNAcentral descriptions<commit_after>"""
Update all RNAcentral descriptions to the latest version using RNAcentral API.
Usage:
python update_rnacentral_descriptions.py
"""
import requests
from utils import RfamDB
def update_description(cursor, cnx, rfamseq_acc, description):
"""
Update rfamseq description for a sequence ID.
"""
sql = """UPDATE rfamseq
SET description=%s
WHERE
rfamseq_acc=%s
"""
cursor.execute(sql, (description, rfamseq_acc))
cnx.commit()
def get_rnacentral_ids(cursor):
"""
Get a list of RNAcentral IDs from the rfamseq table.
"""
data = []
sql = """SELECT rfamseq_acc
FROM rfamseq
WHERE
rfamseq_acc LIKE 'URS00%'"""
cursor.execute(sql)
for result in cursor.fetchall():
data.append(result)
print 'Found {} RNAcentral IDs'.format(len(data))
return data
def update_descriptions(cursor, cnx):
"""
Update all RNAcentral descriptions to the latest version.
"""
found = 0
not_found = 0
for entry in get_rnacentral_ids(cursor):
url = 'http://www.ebi.ac.uk/ebisearch/ws/rest/rnacentral?query={} AND entry_type:sequence&fields=description&format=json'
rnacentral_id = entry[0]
data = requests.get(url.format(rnacentral_id))
if data.json()['hitCount'] == 1:
description = data.json()['entries'][0]['fields']['description'][0]
print('{}: {}'.format(rnacentral_id, description))
update_description(cursor, cnx, rnacentral_id, description)
found += 1
else:
print('No description found for {}'.format(rnacentral_id))
not_found += 1
print('Updated {} descriptions, not found {} descriptions'.format(found, not_found))
def main():
"""
Main entry point.
"""
cnx = RfamDB.connect()
cursor = cnx.cursor(buffered=True)
update_descriptions(cursor, cnx)
cursor.close()
RfamDB.disconnect(cnx)
if __name__ == '__main__':
main()
|
|
34c1c4789de07a2b5044940210618103327f3c37
|
astrotools/interpolate.py
|
astrotools/interpolate.py
|
"""
Set of routines and classes for interpolating light curves.
There include Gaussian Processes and Spline interpolation
"""
import george
from george.kernels import Matern32Kernel
class GP:
def __init__(self):
self.gp = None
def fit(self, data, kernel_size=1000):
self.gp = george.GP(Matern32Kernel(kernel_size))
self.gp.compute(data['mjd'], data['flux_err'])
|
Create an outline for the gaussian process interpolation class.
|
Create an outline for the gaussian process interpolation class.
|
Python
|
mit
|
SzymonPrajs/astrotools
|
Create an outline for the gaussian process interpolation class.
|
"""
Set of routines and classes for interpolating light curves.
There include Gaussian Processes and Spline interpolation
"""
import george
from george.kernels import Matern32Kernel
class GP:
def __init__(self):
self.gp = None
def fit(self, data, kernel_size=1000):
self.gp = george.GP(Matern32Kernel(kernel_size))
self.gp.compute(data['mjd'], data['flux_err'])
|
<commit_before><commit_msg>Create an outline for the gaussian process interpolation class.<commit_after>
|
"""
Set of routines and classes for interpolating light curves.
There include Gaussian Processes and Spline interpolation
"""
import george
from george.kernels import Matern32Kernel
class GP:
def __init__(self):
self.gp = None
def fit(self, data, kernel_size=1000):
self.gp = george.GP(Matern32Kernel(kernel_size))
self.gp.compute(data['mjd'], data['flux_err'])
|
Create an outline for the gaussian process interpolation class."""
Set of routines and classes for interpolating light curves.
There include Gaussian Processes and Spline interpolation
"""
import george
from george.kernels import Matern32Kernel
class GP:
def __init__(self):
self.gp = None
def fit(self, data, kernel_size=1000):
self.gp = george.GP(Matern32Kernel(kernel_size))
self.gp.compute(data['mjd'], data['flux_err'])
|
<commit_before><commit_msg>Create an outline for the gaussian process interpolation class.<commit_after>"""
Set of routines and classes for interpolating light curves.
There include Gaussian Processes and Spline interpolation
"""
import george
from george.kernels import Matern32Kernel
class GP:
def __init__(self):
self.gp = None
def fit(self, data, kernel_size=1000):
self.gp = george.GP(Matern32Kernel(kernel_size))
self.gp.compute(data['mjd'], data['flux_err'])
|
|
d6590c14c16f30cd2802917183fe17de7f0eabf0
|
bin/result_updater.py
|
bin/result_updater.py
|
#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bulk datastore changes."""
__author__ = 'slamm@google.com (Stephen Lamm)'
import getopt
import getpass
import os
import simplejson
import sys
DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('%s/..' % DIR_PATH)
from appengine_tools import appengine_rpc
UPDATER_URL_PATH = '/admin/update_result_parents'
class ResultUpdater(object):
def __init__(self, host, path, user, bookmark=None):
self.path = path
self.user = user
user_agent = None
# TODO: figure out a value for 'source'.
# Doc says, "The source to specify in authentication requests."
source = ''
self.rpc_server = appengine_rpc.HttpRpcServer(
host, self.GetCredentials, user_agent, source, save_cookies=True)
def GetCredentials(self):
# TODO: Grab email/password from config
return self.user, getpass.getpass('Password for %s: ' % self.user)
def Send(self, bookmark, total_scanned, total_updated):
response_data = self.rpc_server.Send(self.path, simplejson.dumps(
(bookmark, total_scanned, total_updated)))
return simplejson.loads(response_data)
def main(argv):
options, args = getopt.getopt(
argv[1:],
'h:u:',
['host=', 'gae_user='])
host = None
gae_user = None
for option_key, option_value in options:
if option_key in ('-h', '--host'):
host = option_value
elif option_key in ('-u', '--gae_user'):
gae_user = option_value
updater = ResultUpdater(host, UPDATER_URL_PATH, user=gae_user)
bookmark = None
total_scanned = 0
total_updated = 0
while 1:
print 'Update batch: %s (total_scanned=%s, total_updated=%s)' % (
bookmark or 'no_bookmark', total_scanned, total_updated)
bookmark, total_scanned, total_updated = updater.Send(
bookmark, total_scanned, total_updated)
if not bookmark:
break
if __name__ == '__main__':
main(sys.argv)
|
Add a client to migrate from ResultParent.user_agent_list to ResultParent.user_agent_pretty
|
Add a client to migrate from ResultParent.user_agent_list to ResultParent.user_agent_pretty
|
Python
|
apache-2.0
|
ollie314/browserscope,shinyChen/browserscope,image72/browserscope,ollie314/browserscope,githubashto/browserscope,githubashto/browserscope,shinyChen/browserscope,image72/browserscope,githubashto/browserscope,githubashto/browserscope,shinyChen/browserscope,ollie314/browserscope,shinyChen/browserscope,image72/browserscope,image72/browserscope,ollie314/browserscope
|
Add a client to migrate from ResultParent.user_agent_list to ResultParent.user_agent_pretty
|
#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bulk datastore changes."""
__author__ = 'slamm@google.com (Stephen Lamm)'
import getopt
import getpass
import os
import simplejson
import sys
DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('%s/..' % DIR_PATH)
from appengine_tools import appengine_rpc
UPDATER_URL_PATH = '/admin/update_result_parents'
class ResultUpdater(object):
def __init__(self, host, path, user, bookmark=None):
self.path = path
self.user = user
user_agent = None
# TODO: figure out a value for 'source'.
# Doc says, "The source to specify in authentication requests."
source = ''
self.rpc_server = appengine_rpc.HttpRpcServer(
host, self.GetCredentials, user_agent, source, save_cookies=True)
def GetCredentials(self):
# TODO: Grab email/password from config
return self.user, getpass.getpass('Password for %s: ' % self.user)
def Send(self, bookmark, total_scanned, total_updated):
response_data = self.rpc_server.Send(self.path, simplejson.dumps(
(bookmark, total_scanned, total_updated)))
return simplejson.loads(response_data)
def main(argv):
options, args = getopt.getopt(
argv[1:],
'h:u:',
['host=', 'gae_user='])
host = None
gae_user = None
for option_key, option_value in options:
if option_key in ('-h', '--host'):
host = option_value
elif option_key in ('-u', '--gae_user'):
gae_user = option_value
updater = ResultUpdater(host, UPDATER_URL_PATH, user=gae_user)
bookmark = None
total_scanned = 0
total_updated = 0
while 1:
print 'Update batch: %s (total_scanned=%s, total_updated=%s)' % (
bookmark or 'no_bookmark', total_scanned, total_updated)
bookmark, total_scanned, total_updated = updater.Send(
bookmark, total_scanned, total_updated)
if not bookmark:
break
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add a client to migrate from ResultParent.user_agent_list to ResultParent.user_agent_pretty<commit_after>
|
#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bulk datastore changes."""
__author__ = 'slamm@google.com (Stephen Lamm)'
import getopt
import getpass
import os
import simplejson
import sys
DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('%s/..' % DIR_PATH)
from appengine_tools import appengine_rpc
UPDATER_URL_PATH = '/admin/update_result_parents'
class ResultUpdater(object):
def __init__(self, host, path, user, bookmark=None):
self.path = path
self.user = user
user_agent = None
# TODO: figure out a value for 'source'.
# Doc says, "The source to specify in authentication requests."
source = ''
self.rpc_server = appengine_rpc.HttpRpcServer(
host, self.GetCredentials, user_agent, source, save_cookies=True)
def GetCredentials(self):
# TODO: Grab email/password from config
return self.user, getpass.getpass('Password for %s: ' % self.user)
def Send(self, bookmark, total_scanned, total_updated):
response_data = self.rpc_server.Send(self.path, simplejson.dumps(
(bookmark, total_scanned, total_updated)))
return simplejson.loads(response_data)
def main(argv):
options, args = getopt.getopt(
argv[1:],
'h:u:',
['host=', 'gae_user='])
host = None
gae_user = None
for option_key, option_value in options:
if option_key in ('-h', '--host'):
host = option_value
elif option_key in ('-u', '--gae_user'):
gae_user = option_value
updater = ResultUpdater(host, UPDATER_URL_PATH, user=gae_user)
bookmark = None
total_scanned = 0
total_updated = 0
while 1:
print 'Update batch: %s (total_scanned=%s, total_updated=%s)' % (
bookmark or 'no_bookmark', total_scanned, total_updated)
bookmark, total_scanned, total_updated = updater.Send(
bookmark, total_scanned, total_updated)
if not bookmark:
break
if __name__ == '__main__':
main(sys.argv)
|
Add a client to migrate from ResultParent.user_agent_list to ResultParent.user_agent_pretty#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bulk datastore changes."""
__author__ = 'slamm@google.com (Stephen Lamm)'
import getopt
import getpass
import os
import simplejson
import sys
DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('%s/..' % DIR_PATH)
from appengine_tools import appengine_rpc
UPDATER_URL_PATH = '/admin/update_result_parents'
class ResultUpdater(object):
def __init__(self, host, path, user, bookmark=None):
self.path = path
self.user = user
user_agent = None
# TODO: figure out a value for 'source'.
# Doc says, "The source to specify in authentication requests."
source = ''
self.rpc_server = appengine_rpc.HttpRpcServer(
host, self.GetCredentials, user_agent, source, save_cookies=True)
def GetCredentials(self):
# TODO: Grab email/password from config
return self.user, getpass.getpass('Password for %s: ' % self.user)
def Send(self, bookmark, total_scanned, total_updated):
response_data = self.rpc_server.Send(self.path, simplejson.dumps(
(bookmark, total_scanned, total_updated)))
return simplejson.loads(response_data)
def main(argv):
options, args = getopt.getopt(
argv[1:],
'h:u:',
['host=', 'gae_user='])
host = None
gae_user = None
for option_key, option_value in options:
if option_key in ('-h', '--host'):
host = option_value
elif option_key in ('-u', '--gae_user'):
gae_user = option_value
updater = ResultUpdater(host, UPDATER_URL_PATH, user=gae_user)
bookmark = None
total_scanned = 0
total_updated = 0
while 1:
print 'Update batch: %s (total_scanned=%s, total_updated=%s)' % (
bookmark or 'no_bookmark', total_scanned, total_updated)
bookmark, total_scanned, total_updated = updater.Send(
bookmark, total_scanned, total_updated)
if not bookmark:
break
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add a client to migrate from ResultParent.user_agent_list to ResultParent.user_agent_pretty<commit_after>#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License')
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bulk datastore changes."""
__author__ = 'slamm@google.com (Stephen Lamm)'
import getopt
import getpass
import os
import simplejson
import sys
DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('%s/..' % DIR_PATH)
from appengine_tools import appengine_rpc
UPDATER_URL_PATH = '/admin/update_result_parents'
class ResultUpdater(object):
def __init__(self, host, path, user, bookmark=None):
self.path = path
self.user = user
user_agent = None
# TODO: figure out a value for 'source'.
# Doc says, "The source to specify in authentication requests."
source = ''
self.rpc_server = appengine_rpc.HttpRpcServer(
host, self.GetCredentials, user_agent, source, save_cookies=True)
def GetCredentials(self):
# TODO: Grab email/password from config
return self.user, getpass.getpass('Password for %s: ' % self.user)
def Send(self, bookmark, total_scanned, total_updated):
response_data = self.rpc_server.Send(self.path, simplejson.dumps(
(bookmark, total_scanned, total_updated)))
return simplejson.loads(response_data)
def main(argv):
options, args = getopt.getopt(
argv[1:],
'h:u:',
['host=', 'gae_user='])
host = None
gae_user = None
for option_key, option_value in options:
if option_key in ('-h', '--host'):
host = option_value
elif option_key in ('-u', '--gae_user'):
gae_user = option_value
updater = ResultUpdater(host, UPDATER_URL_PATH, user=gae_user)
bookmark = None
total_scanned = 0
total_updated = 0
while 1:
print 'Update batch: %s (total_scanned=%s, total_updated=%s)' % (
bookmark or 'no_bookmark', total_scanned, total_updated)
bookmark, total_scanned, total_updated = updater.Send(
bookmark, total_scanned, total_updated)
if not bookmark:
break
if __name__ == '__main__':
main(sys.argv)
|
|
3489deed854ab6c5448c5a28955751a6a1ec134e
|
test/jpypetest/test_zzz.py
|
test/jpypetest/test_zzz.py
|
import sys
import _jpype
import jpype
from jpype.types import *
from jpype import JPackage, java
import common
import pytest
try:
import numpy as np
except ImportError:
pass
class ZZZTestCase(common.JPypeTestCase):
def setUp(self):
common.JPypeTestCase.setUp(self)
def testShutdown(self):
jpype.shutdownJVM()
|
Add shutdown at end to make sure that we dont crash on shutdown.
|
Add shutdown at end to make sure that we dont crash on shutdown.
|
Python
|
apache-2.0
|
originell/jpype,originell/jpype,originell/jpype,originell/jpype,originell/jpype
|
Add shutdown at end to make sure that we dont crash on shutdown.
|
import sys
import _jpype
import jpype
from jpype.types import *
from jpype import JPackage, java
import common
import pytest
try:
import numpy as np
except ImportError:
pass
class ZZZTestCase(common.JPypeTestCase):
def setUp(self):
common.JPypeTestCase.setUp(self)
def testShutdown(self):
jpype.shutdownJVM()
|
<commit_before><commit_msg>Add shutdown at end to make sure that we dont crash on shutdown.<commit_after>
|
import sys
import _jpype
import jpype
from jpype.types import *
from jpype import JPackage, java
import common
import pytest
try:
import numpy as np
except ImportError:
pass
class ZZZTestCase(common.JPypeTestCase):
def setUp(self):
common.JPypeTestCase.setUp(self)
def testShutdown(self):
jpype.shutdownJVM()
|
Add shutdown at end to make sure that we dont crash on shutdown.import sys
import _jpype
import jpype
from jpype.types import *
from jpype import JPackage, java
import common
import pytest
try:
import numpy as np
except ImportError:
pass
class ZZZTestCase(common.JPypeTestCase):
def setUp(self):
common.JPypeTestCase.setUp(self)
def testShutdown(self):
jpype.shutdownJVM()
|
<commit_before><commit_msg>Add shutdown at end to make sure that we dont crash on shutdown.<commit_after>import sys
import _jpype
import jpype
from jpype.types import *
from jpype import JPackage, java
import common
import pytest
try:
import numpy as np
except ImportError:
pass
class ZZZTestCase(common.JPypeTestCase):
def setUp(self):
common.JPypeTestCase.setUp(self)
def testShutdown(self):
jpype.shutdownJVM()
|
|
b023316bfa98a7b864044fe1274dbaed52990d77
|
tests/test_google_group.py
|
tests/test_google_group.py
|
import pytest
from web_test_base import *
class TestGoogleGroup(WebTestBase):
urls_to_get = [
"https://groups.google.com/forum/#!forum/iati-technical"
]
def test_locate_links(self, loaded_request):
"""
Tests that a page contains links to the defined URLs.
This test would ideally check to see whether there is a link to:
http://discuss.iatistandard.org
Google Groups, however, is loaded primarily with javascript.
As such, the link does not exist upon a simple load of the page.
This functionality could be added later with aSelenium test.
"""
pass
|
Add a test for the IATI Google Group
|
Add a test for the IATI Google Group
This tests the 200 response.
It would ideally be checked that there is a link to IATI Discuss.
This is not done because of the way the page loads.
This functionality could be added later with Selenium.
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
Add a test for the IATI Google Group
This tests the 200 response.
It would ideally be checked that there is a link to IATI Discuss.
This is not done because of the way the page loads.
This functionality could be added later with Selenium.
|
import pytest
from web_test_base import *
class TestGoogleGroup(WebTestBase):
urls_to_get = [
"https://groups.google.com/forum/#!forum/iati-technical"
]
def test_locate_links(self, loaded_request):
"""
Tests that a page contains links to the defined URLs.
This test would ideally check to see whether there is a link to:
http://discuss.iatistandard.org
Google Groups, however, is loaded primarily with javascript.
As such, the link does not exist upon a simple load of the page.
This functionality could be added later with aSelenium test.
"""
pass
|
<commit_before><commit_msg>Add a test for the IATI Google Group
This tests the 200 response.
It would ideally be checked that there is a link to IATI Discuss.
This is not done because of the way the page loads.
This functionality could be added later with Selenium.<commit_after>
|
import pytest
from web_test_base import *
class TestGoogleGroup(WebTestBase):
urls_to_get = [
"https://groups.google.com/forum/#!forum/iati-technical"
]
def test_locate_links(self, loaded_request):
"""
Tests that a page contains links to the defined URLs.
This test would ideally check to see whether there is a link to:
http://discuss.iatistandard.org
Google Groups, however, is loaded primarily with javascript.
As such, the link does not exist upon a simple load of the page.
This functionality could be added later with aSelenium test.
"""
pass
|
Add a test for the IATI Google Group
This tests the 200 response.
It would ideally be checked that there is a link to IATI Discuss.
This is not done because of the way the page loads.
This functionality could be added later with Selenium.import pytest
from web_test_base import *
class TestGoogleGroup(WebTestBase):
urls_to_get = [
"https://groups.google.com/forum/#!forum/iati-technical"
]
def test_locate_links(self, loaded_request):
"""
Tests that a page contains links to the defined URLs.
This test would ideally check to see whether there is a link to:
http://discuss.iatistandard.org
Google Groups, however, is loaded primarily with javascript.
As such, the link does not exist upon a simple load of the page.
This functionality could be added later with aSelenium test.
"""
pass
|
<commit_before><commit_msg>Add a test for the IATI Google Group
This tests the 200 response.
It would ideally be checked that there is a link to IATI Discuss.
This is not done because of the way the page loads.
This functionality could be added later with Selenium.<commit_after>import pytest
from web_test_base import *
class TestGoogleGroup(WebTestBase):
urls_to_get = [
"https://groups.google.com/forum/#!forum/iati-technical"
]
def test_locate_links(self, loaded_request):
"""
Tests that a page contains links to the defined URLs.
This test would ideally check to see whether there is a link to:
http://discuss.iatistandard.org
Google Groups, however, is loaded primarily with javascript.
As such, the link does not exist upon a simple load of the page.
This functionality could be added later with aSelenium test.
"""
pass
|
|
5101e5e6c11c37a1970e8f56359c62316444b49a
|
supriya/tools/synthdeftools/test/test_SynthDef___str__.py
|
supriya/tools/synthdeftools/test/test_SynthDef___str__.py
|
# -*- encoding: utf-8 -*-
from abjad.tools import systemtools
from supriya import synthdeftools
from supriya import ugentools
class TestCase(systemtools.TestCase):
def test_multi_value_parameters(self):
with synthdeftools.SynthDefBuilder(
amp=0.1,
freqs=[300, 400],
out=0,
) as builder:
sines = ugentools.SinOsc.ar(
frequency=builder['freqs'],
)
sines = ugentools.Mix.new(sines)
sines = sines * builder['amp']
ugentools.Out.ar(
bus=builder['out'],
source=sines,
)
synthdef = builder.build()
self.compare_strings(
'''
SynthDef ... {
0_Control[1:freqs[0]] -> 1_SinOsc[0:frequency]
const_0:0.0 -> 1_SinOsc[1:phase]
0_Control[2:freqs[1]] -> 2_SinOsc[0:frequency]
const_0:0.0 -> 2_SinOsc[1:phase]
1_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[0:left]
2_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[1:right]
3_BinaryOpUGen:ADDITION[0] -> 4_BinaryOpUGen:MULTIPLICATION[0:left]
0_Control[0:amp] -> 4_BinaryOpUGen:MULTIPLICATION[1:right]
0_Control[3:out] -> 5_Out[0:bus]
4_BinaryOpUGen:MULTIPLICATION[0] -> 5_Out[1:source]
}
''',
str(synthdef),
)
|
Add multi-value parameter SynthDef str() test.
|
Add multi-value parameter SynthDef str() test.
|
Python
|
mit
|
josiah-wolf-oberholtzer/supriya,Pulgama/supriya,Pulgama/supriya,Pulgama/supriya,Pulgama/supriya
|
Add multi-value parameter SynthDef str() test.
|
# -*- encoding: utf-8 -*-
from abjad.tools import systemtools
from supriya import synthdeftools
from supriya import ugentools
class TestCase(systemtools.TestCase):
def test_multi_value_parameters(self):
with synthdeftools.SynthDefBuilder(
amp=0.1,
freqs=[300, 400],
out=0,
) as builder:
sines = ugentools.SinOsc.ar(
frequency=builder['freqs'],
)
sines = ugentools.Mix.new(sines)
sines = sines * builder['amp']
ugentools.Out.ar(
bus=builder['out'],
source=sines,
)
synthdef = builder.build()
self.compare_strings(
'''
SynthDef ... {
0_Control[1:freqs[0]] -> 1_SinOsc[0:frequency]
const_0:0.0 -> 1_SinOsc[1:phase]
0_Control[2:freqs[1]] -> 2_SinOsc[0:frequency]
const_0:0.0 -> 2_SinOsc[1:phase]
1_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[0:left]
2_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[1:right]
3_BinaryOpUGen:ADDITION[0] -> 4_BinaryOpUGen:MULTIPLICATION[0:left]
0_Control[0:amp] -> 4_BinaryOpUGen:MULTIPLICATION[1:right]
0_Control[3:out] -> 5_Out[0:bus]
4_BinaryOpUGen:MULTIPLICATION[0] -> 5_Out[1:source]
}
''',
str(synthdef),
)
|
<commit_before><commit_msg>Add multi-value parameter SynthDef str() test.<commit_after>
|
# -*- encoding: utf-8 -*-
from abjad.tools import systemtools
from supriya import synthdeftools
from supriya import ugentools
class TestCase(systemtools.TestCase):
def test_multi_value_parameters(self):
with synthdeftools.SynthDefBuilder(
amp=0.1,
freqs=[300, 400],
out=0,
) as builder:
sines = ugentools.SinOsc.ar(
frequency=builder['freqs'],
)
sines = ugentools.Mix.new(sines)
sines = sines * builder['amp']
ugentools.Out.ar(
bus=builder['out'],
source=sines,
)
synthdef = builder.build()
self.compare_strings(
'''
SynthDef ... {
0_Control[1:freqs[0]] -> 1_SinOsc[0:frequency]
const_0:0.0 -> 1_SinOsc[1:phase]
0_Control[2:freqs[1]] -> 2_SinOsc[0:frequency]
const_0:0.0 -> 2_SinOsc[1:phase]
1_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[0:left]
2_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[1:right]
3_BinaryOpUGen:ADDITION[0] -> 4_BinaryOpUGen:MULTIPLICATION[0:left]
0_Control[0:amp] -> 4_BinaryOpUGen:MULTIPLICATION[1:right]
0_Control[3:out] -> 5_Out[0:bus]
4_BinaryOpUGen:MULTIPLICATION[0] -> 5_Out[1:source]
}
''',
str(synthdef),
)
|
Add multi-value parameter SynthDef str() test.# -*- encoding: utf-8 -*-
from abjad.tools import systemtools
from supriya import synthdeftools
from supriya import ugentools
class TestCase(systemtools.TestCase):
def test_multi_value_parameters(self):
with synthdeftools.SynthDefBuilder(
amp=0.1,
freqs=[300, 400],
out=0,
) as builder:
sines = ugentools.SinOsc.ar(
frequency=builder['freqs'],
)
sines = ugentools.Mix.new(sines)
sines = sines * builder['amp']
ugentools.Out.ar(
bus=builder['out'],
source=sines,
)
synthdef = builder.build()
self.compare_strings(
'''
SynthDef ... {
0_Control[1:freqs[0]] -> 1_SinOsc[0:frequency]
const_0:0.0 -> 1_SinOsc[1:phase]
0_Control[2:freqs[1]] -> 2_SinOsc[0:frequency]
const_0:0.0 -> 2_SinOsc[1:phase]
1_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[0:left]
2_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[1:right]
3_BinaryOpUGen:ADDITION[0] -> 4_BinaryOpUGen:MULTIPLICATION[0:left]
0_Control[0:amp] -> 4_BinaryOpUGen:MULTIPLICATION[1:right]
0_Control[3:out] -> 5_Out[0:bus]
4_BinaryOpUGen:MULTIPLICATION[0] -> 5_Out[1:source]
}
''',
str(synthdef),
)
|
<commit_before><commit_msg>Add multi-value parameter SynthDef str() test.<commit_after># -*- encoding: utf-8 -*-
from abjad.tools import systemtools
from supriya import synthdeftools
from supriya import ugentools
class TestCase(systemtools.TestCase):
def test_multi_value_parameters(self):
with synthdeftools.SynthDefBuilder(
amp=0.1,
freqs=[300, 400],
out=0,
) as builder:
sines = ugentools.SinOsc.ar(
frequency=builder['freqs'],
)
sines = ugentools.Mix.new(sines)
sines = sines * builder['amp']
ugentools.Out.ar(
bus=builder['out'],
source=sines,
)
synthdef = builder.build()
self.compare_strings(
'''
SynthDef ... {
0_Control[1:freqs[0]] -> 1_SinOsc[0:frequency]
const_0:0.0 -> 1_SinOsc[1:phase]
0_Control[2:freqs[1]] -> 2_SinOsc[0:frequency]
const_0:0.0 -> 2_SinOsc[1:phase]
1_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[0:left]
2_SinOsc[0] -> 3_BinaryOpUGen:ADDITION[1:right]
3_BinaryOpUGen:ADDITION[0] -> 4_BinaryOpUGen:MULTIPLICATION[0:left]
0_Control[0:amp] -> 4_BinaryOpUGen:MULTIPLICATION[1:right]
0_Control[3:out] -> 5_Out[0:bus]
4_BinaryOpUGen:MULTIPLICATION[0] -> 5_Out[1:source]
}
''',
str(synthdef),
)
|
|
c428af7bce479fdcde34ce0997d41573861bf3e5
|
scripts/count_duplicates.py
|
scripts/count_duplicates.py
|
# -*- coding: utf-8 -*-
import pandas as pd
all_df = None
for filename in ["dentistes.refined.csv",
"infirmiers.refined.csv",
"medecins_exploitables.refined.csv",
"medecins_inexploitables.refined.csv",
"pharmaciens.refined.csv",
"transparencesante_avantages.refined.csv",
"sagefemmes.refined.csv",
"transparencesante_conventions.refined.csv"]:
df = pd.read_csv("data/refined/%s" % filename, dtype=object, encoding='utf-8', usecols=["DECL_TYPE", "ORIGIN", "LABO", "BENEF_PS_QUALITE_NOM_PRENOM", "DECL_CONV_DATE", "DECL_AVANT_DATE", "DECL_AVANT_MONTANT"])
if all_df is None:
all_df = df
else:
all_df = all_df.append(df)
print "Number of duplicated rows : " % all_df.duplicated().sum()
|
Add script to count duplicated data
|
Add script to count duplicated data
|
Python
|
agpl-3.0
|
regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data
|
Add script to count duplicated data
|
# -*- coding: utf-8 -*-
import pandas as pd
all_df = None
for filename in ["dentistes.refined.csv",
"infirmiers.refined.csv",
"medecins_exploitables.refined.csv",
"medecins_inexploitables.refined.csv",
"pharmaciens.refined.csv",
"transparencesante_avantages.refined.csv",
"sagefemmes.refined.csv",
"transparencesante_conventions.refined.csv"]:
df = pd.read_csv("data/refined/%s" % filename, dtype=object, encoding='utf-8', usecols=["DECL_TYPE", "ORIGIN", "LABO", "BENEF_PS_QUALITE_NOM_PRENOM", "DECL_CONV_DATE", "DECL_AVANT_DATE", "DECL_AVANT_MONTANT"])
if all_df is None:
all_df = df
else:
all_df = all_df.append(df)
print "Number of duplicated rows : " % all_df.duplicated().sum()
|
<commit_before><commit_msg>Add script to count duplicated data<commit_after>
|
# -*- coding: utf-8 -*-
import pandas as pd
all_df = None
for filename in ["dentistes.refined.csv",
"infirmiers.refined.csv",
"medecins_exploitables.refined.csv",
"medecins_inexploitables.refined.csv",
"pharmaciens.refined.csv",
"transparencesante_avantages.refined.csv",
"sagefemmes.refined.csv",
"transparencesante_conventions.refined.csv"]:
df = pd.read_csv("data/refined/%s" % filename, dtype=object, encoding='utf-8', usecols=["DECL_TYPE", "ORIGIN", "LABO", "BENEF_PS_QUALITE_NOM_PRENOM", "DECL_CONV_DATE", "DECL_AVANT_DATE", "DECL_AVANT_MONTANT"])
if all_df is None:
all_df = df
else:
all_df = all_df.append(df)
print "Number of duplicated rows : " % all_df.duplicated().sum()
|
Add script to count duplicated data# -*- coding: utf-8 -*-
import pandas as pd
all_df = None
for filename in ["dentistes.refined.csv",
"infirmiers.refined.csv",
"medecins_exploitables.refined.csv",
"medecins_inexploitables.refined.csv",
"pharmaciens.refined.csv",
"transparencesante_avantages.refined.csv",
"sagefemmes.refined.csv",
"transparencesante_conventions.refined.csv"]:
df = pd.read_csv("data/refined/%s" % filename, dtype=object, encoding='utf-8', usecols=["DECL_TYPE", "ORIGIN", "LABO", "BENEF_PS_QUALITE_NOM_PRENOM", "DECL_CONV_DATE", "DECL_AVANT_DATE", "DECL_AVANT_MONTANT"])
if all_df is None:
all_df = df
else:
all_df = all_df.append(df)
print "Number of duplicated rows : " % all_df.duplicated().sum()
|
<commit_before><commit_msg>Add script to count duplicated data<commit_after># -*- coding: utf-8 -*-
import pandas as pd
all_df = None
for filename in ["dentistes.refined.csv",
"infirmiers.refined.csv",
"medecins_exploitables.refined.csv",
"medecins_inexploitables.refined.csv",
"pharmaciens.refined.csv",
"transparencesante_avantages.refined.csv",
"sagefemmes.refined.csv",
"transparencesante_conventions.refined.csv"]:
df = pd.read_csv("data/refined/%s" % filename, dtype=object, encoding='utf-8', usecols=["DECL_TYPE", "ORIGIN", "LABO", "BENEF_PS_QUALITE_NOM_PRENOM", "DECL_CONV_DATE", "DECL_AVANT_DATE", "DECL_AVANT_MONTANT"])
if all_df is None:
all_df = df
else:
all_df = all_df.append(df)
print "Number of duplicated rows : " % all_df.duplicated().sum()
|
|
a71314962f61dceeda807d3bfa93abc2a2822eda
|
tests/py/test_transactional_emails.py
|
tests/py/test_transactional_emails.py
|
import mock
from gratipay.models.participant import Participant
from gratipay.models.account_elsewhere import AccountElsewhere
from gratipay.testing import Harness
from gratipay.testing.emails import get_last_email
class TestTransactionalEmails(Harness):
@mock.patch.object(Participant._mailer.messages, 'send')
def test_opt_in_sends_notifications_to_patrons(self, mailer):
alice = self.make_elsewhere('twitter', 1, 'alice')
bob = self.make_participant('bob', claimed_time='now', email_address='bob@gmail.com')
dan = self.make_participant('dan', claimed_time='now', email_address='dan@gmail.com')
self.make_participant('roy', claimed_time='now', email_address='roy@gmail.com', notify_on_opt_in=False)
bob.set_tip_to(alice.participant.username, '100')
dan.set_tip_to(alice.participant.username, '100')
alice = AccountElsewhere.from_user_name('twitter', 'alice')
alice.opt_in('alice')
assert mailer.call_count == 2
last_email = get_last_email(mailer)
assert last_email['to'] == 'dan@gmail.com'
expected = "You had pledged to alice. They've just joined Gratipay!"
assert expected in last_email['message_text']
|
Add tests for transactional emails
|
Add tests for transactional emails
|
Python
|
mit
|
gratipay/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com
|
Add tests for transactional emails
|
import mock
from gratipay.models.participant import Participant
from gratipay.models.account_elsewhere import AccountElsewhere
from gratipay.testing import Harness
from gratipay.testing.emails import get_last_email
class TestTransactionalEmails(Harness):
@mock.patch.object(Participant._mailer.messages, 'send')
def test_opt_in_sends_notifications_to_patrons(self, mailer):
alice = self.make_elsewhere('twitter', 1, 'alice')
bob = self.make_participant('bob', claimed_time='now', email_address='bob@gmail.com')
dan = self.make_participant('dan', claimed_time='now', email_address='dan@gmail.com')
self.make_participant('roy', claimed_time='now', email_address='roy@gmail.com', notify_on_opt_in=False)
bob.set_tip_to(alice.participant.username, '100')
dan.set_tip_to(alice.participant.username, '100')
alice = AccountElsewhere.from_user_name('twitter', 'alice')
alice.opt_in('alice')
assert mailer.call_count == 2
last_email = get_last_email(mailer)
assert last_email['to'] == 'dan@gmail.com'
expected = "You had pledged to alice. They've just joined Gratipay!"
assert expected in last_email['message_text']
|
<commit_before><commit_msg>Add tests for transactional emails<commit_after>
|
import mock
from gratipay.models.participant import Participant
from gratipay.models.account_elsewhere import AccountElsewhere
from gratipay.testing import Harness
from gratipay.testing.emails import get_last_email
class TestTransactionalEmails(Harness):
@mock.patch.object(Participant._mailer.messages, 'send')
def test_opt_in_sends_notifications_to_patrons(self, mailer):
alice = self.make_elsewhere('twitter', 1, 'alice')
bob = self.make_participant('bob', claimed_time='now', email_address='bob@gmail.com')
dan = self.make_participant('dan', claimed_time='now', email_address='dan@gmail.com')
self.make_participant('roy', claimed_time='now', email_address='roy@gmail.com', notify_on_opt_in=False)
bob.set_tip_to(alice.participant.username, '100')
dan.set_tip_to(alice.participant.username, '100')
alice = AccountElsewhere.from_user_name('twitter', 'alice')
alice.opt_in('alice')
assert mailer.call_count == 2
last_email = get_last_email(mailer)
assert last_email['to'] == 'dan@gmail.com'
expected = "You had pledged to alice. They've just joined Gratipay!"
assert expected in last_email['message_text']
|
Add tests for transactional emailsimport mock
from gratipay.models.participant import Participant
from gratipay.models.account_elsewhere import AccountElsewhere
from gratipay.testing import Harness
from gratipay.testing.emails import get_last_email
class TestTransactionalEmails(Harness):
@mock.patch.object(Participant._mailer.messages, 'send')
def test_opt_in_sends_notifications_to_patrons(self, mailer):
alice = self.make_elsewhere('twitter', 1, 'alice')
bob = self.make_participant('bob', claimed_time='now', email_address='bob@gmail.com')
dan = self.make_participant('dan', claimed_time='now', email_address='dan@gmail.com')
self.make_participant('roy', claimed_time='now', email_address='roy@gmail.com', notify_on_opt_in=False)
bob.set_tip_to(alice.participant.username, '100')
dan.set_tip_to(alice.participant.username, '100')
alice = AccountElsewhere.from_user_name('twitter', 'alice')
alice.opt_in('alice')
assert mailer.call_count == 2
last_email = get_last_email(mailer)
assert last_email['to'] == 'dan@gmail.com'
expected = "You had pledged to alice. They've just joined Gratipay!"
assert expected in last_email['message_text']
|
<commit_before><commit_msg>Add tests for transactional emails<commit_after>import mock
from gratipay.models.participant import Participant
from gratipay.models.account_elsewhere import AccountElsewhere
from gratipay.testing import Harness
from gratipay.testing.emails import get_last_email
class TestTransactionalEmails(Harness):
@mock.patch.object(Participant._mailer.messages, 'send')
def test_opt_in_sends_notifications_to_patrons(self, mailer):
alice = self.make_elsewhere('twitter', 1, 'alice')
bob = self.make_participant('bob', claimed_time='now', email_address='bob@gmail.com')
dan = self.make_participant('dan', claimed_time='now', email_address='dan@gmail.com')
self.make_participant('roy', claimed_time='now', email_address='roy@gmail.com', notify_on_opt_in=False)
bob.set_tip_to(alice.participant.username, '100')
dan.set_tip_to(alice.participant.username, '100')
alice = AccountElsewhere.from_user_name('twitter', 'alice')
alice.opt_in('alice')
assert mailer.call_count == 2
last_email = get_last_email(mailer)
assert last_email['to'] == 'dan@gmail.com'
expected = "You had pledged to alice. They've just joined Gratipay!"
assert expected in last_email['message_text']
|
|
b2f95de3e88f785171666dafd442e32ca1ac47eb
|
djtweetar/bin/send.py
|
djtweetar/bin/send.py
|
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from socialregistration.models import TwitterProfile
from weathertracking.models import WeatherStation
from tweetar import *
def send_reports():
profiles = TwitterProfile.objects.filter(content_type=ContentType.objects.get_for_model(WeatherStation))
for profile in profiles:
conf = {
'station': profile.content_object.code,
'twitter_user': profile.screenname,
'oauth_consumer_key': settings.TWITTER_CONSUMER_KEY,
'oauth_consumer_secret': settings.TWITTER_CONSUMER_SECRET_KEY,
'access_token_key': profile.consumer_key,
'access_token_secret': profile.consumer_secret,
}
try:
retrieve_and_post(conf)
except urllib2.HTTPError:
pass
if __name__ == '__main__':
send_reports()
|
Add a script to go through all social credentials available and post the weather. This script will need to be cron'ed.
|
Add a script to go through all social credentials available and post the weather. This script will need to be cron'ed.
|
Python
|
bsd-3-clause
|
adamfast/django-tweetar
|
Add a script to go through all social credentials available and post the weather. This script will need to be cron'ed.
|
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from socialregistration.models import TwitterProfile
from weathertracking.models import WeatherStation
from tweetar import *
def send_reports():
profiles = TwitterProfile.objects.filter(content_type=ContentType.objects.get_for_model(WeatherStation))
for profile in profiles:
conf = {
'station': profile.content_object.code,
'twitter_user': profile.screenname,
'oauth_consumer_key': settings.TWITTER_CONSUMER_KEY,
'oauth_consumer_secret': settings.TWITTER_CONSUMER_SECRET_KEY,
'access_token_key': profile.consumer_key,
'access_token_secret': profile.consumer_secret,
}
try:
retrieve_and_post(conf)
except urllib2.HTTPError:
pass
if __name__ == '__main__':
send_reports()
|
<commit_before><commit_msg>Add a script to go through all social credentials available and post the weather. This script will need to be cron'ed.<commit_after>
|
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from socialregistration.models import TwitterProfile
from weathertracking.models import WeatherStation
from tweetar import *
def send_reports():
profiles = TwitterProfile.objects.filter(content_type=ContentType.objects.get_for_model(WeatherStation))
for profile in profiles:
conf = {
'station': profile.content_object.code,
'twitter_user': profile.screenname,
'oauth_consumer_key': settings.TWITTER_CONSUMER_KEY,
'oauth_consumer_secret': settings.TWITTER_CONSUMER_SECRET_KEY,
'access_token_key': profile.consumer_key,
'access_token_secret': profile.consumer_secret,
}
try:
retrieve_and_post(conf)
except urllib2.HTTPError:
pass
if __name__ == '__main__':
send_reports()
|
Add a script to go through all social credentials available and post the weather. This script will need to be cron'ed.from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from socialregistration.models import TwitterProfile
from weathertracking.models import WeatherStation
from tweetar import *
def send_reports():
profiles = TwitterProfile.objects.filter(content_type=ContentType.objects.get_for_model(WeatherStation))
for profile in profiles:
conf = {
'station': profile.content_object.code,
'twitter_user': profile.screenname,
'oauth_consumer_key': settings.TWITTER_CONSUMER_KEY,
'oauth_consumer_secret': settings.TWITTER_CONSUMER_SECRET_KEY,
'access_token_key': profile.consumer_key,
'access_token_secret': profile.consumer_secret,
}
try:
retrieve_and_post(conf)
except urllib2.HTTPError:
pass
if __name__ == '__main__':
send_reports()
|
<commit_before><commit_msg>Add a script to go through all social credentials available and post the weather. This script will need to be cron'ed.<commit_after>from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from socialregistration.models import TwitterProfile
from weathertracking.models import WeatherStation
from tweetar import *
def send_reports():
profiles = TwitterProfile.objects.filter(content_type=ContentType.objects.get_for_model(WeatherStation))
for profile in profiles:
conf = {
'station': profile.content_object.code,
'twitter_user': profile.screenname,
'oauth_consumer_key': settings.TWITTER_CONSUMER_KEY,
'oauth_consumer_secret': settings.TWITTER_CONSUMER_SECRET_KEY,
'access_token_key': profile.consumer_key,
'access_token_secret': profile.consumer_secret,
}
try:
retrieve_and_post(conf)
except urllib2.HTTPError:
pass
if __name__ == '__main__':
send_reports()
|
|
5e849bfb766a79c688645175e90c1412ed516bf2
|
lms/djangoapps/django_comment_client/management/commands/get_discussion_link.py
|
lms/djangoapps/django_comment_client/management/commands/get_discussion_link.py
|
from django.core.management.base import BaseCommand, CommandError
from courseware.courses import get_course
class Command(BaseCommand):
args = "<course_id>"
def handle(self, *args, **options):
if not args:
raise CommandError("Course id not specified")
if len(args) > 1:
raise CommandError("Only one course id may be specifiied")
course_id = args[0]
try:
course = get_course(course_id)
except ValueError:
raise CommandError("Invalid course id: {}".format(course_id))
if course.discussion_link:
self.stdout.write(course.discussion_link)
|
Add command to get discussion_link for a course
|
Add command to get discussion_link for a course
This will be used to detect courses that are linking to Stack Exchange
sites so we can handle them specially when generating data dumps.
|
Python
|
agpl-3.0
|
hmcmooc/muddx-platform,jbzdak/edx-platform,chauhanhardik/populo,abdoosh00/edraak,kmoocdev2/edx-platform,gymnasium/edx-platform,cpennington/edx-platform,shashank971/edx-platform,EDUlib/edx-platform,motion2015/edx-platform,cpennington/edx-platform,devs1991/test_edx_docmode,teltek/edx-platform,doismellburning/edx-platform,mtlchun/edx,IONISx/edx-platform,marcore/edx-platform,yokose-ks/edx-platform,motion2015/edx-platform,JioEducation/edx-platform,ahmedaljazzar/edx-platform,kxliugang/edx-platform,xingyepei/edx-platform,olexiim/edx-platform,leansoft/edx-platform,vasyarv/edx-platform,ahmadio/edx-platform,marcore/edx-platform,fly19890211/edx-platform,eduNEXT/edx-platform,devs1991/test_edx_docmode,etzhou/edx-platform,nttks/edx-platform,BehavioralInsightsTeam/edx-platform,Ayub-Khan/edx-platform,ESOedX/edx-platform,raccoongang/edx-platform,SivilTaram/edx-platform,utecuy/edx-platform,ZLLab-Mooc/edx-platform,dsajkl/123,rismalrv/edx-platform,edx/edx-platform,wwj718/edx-platform,zhenzhai/edx-platform,jazztpt/edx-platform,shubhdev/edx-platform,amir-qayyum-khan/edx-platform,inares/edx-platform,cyanna/edx-platform,olexiim/edx-platform,DefyVentures/edx-platform,dkarakats/edx-platform,J861449197/edx-platform,mcgachey/edx-platform,angelapper/edx-platform,jbzdak/edx-platform,alexthered/kienhoc-platform,vasyarv/edx-platform,gsehub/edx-platform,nanolearningllc/edx-platform-cypress,openfun/edx-platform,zhenzhai/edx-platform,ak2703/edx-platform,jbzdak/edx-platform,zubair-arbi/edx-platform,Shrhawk/edx-platform,doganov/edx-platform,hkawasaki/kawasaki-aio8-2,franosincic/edx-platform,teltek/edx-platform,UXE/local-edx,kmoocdev/edx-platform,bitifirefly/edx-platform,RPI-OPENEDX/edx-platform,knehez/edx-platform,xinjiguaike/edx-platform,amir-qayyum-khan/edx-platform,hamzehd/edx-platform,atsolakid/edx-platform,doismellburning/edx-platform,tanmaykm/edx-platform,cselis86/edx-platform,10clouds/edx-platform,edx/edx-platform,Stanford-Online/edx-platform,vikas1885/test1,shubhdev/openedx,rue89-tech/edx-platform,knehez/edx-platform,angelapper/edx-platform,Unow/edx-platform,hamzehd/edx-platform,B-MOOC/edx-platform,hkawasaki/kawasaki-aio8-0,angelapper/edx-platform,cognitiveclass/edx-platform,jamesblunt/edx-platform,defance/edx-platform,vasyarv/edx-platform,motion2015/edx-platform,dsajkl/reqiop,nanolearningllc/edx-platform-cypress,motion2015/a3,mitocw/edx-platform,auferack08/edx-platform,ak2703/edx-platform,playm2mboy/edx-platform,ampax/edx-platform-backup,olexiim/edx-platform,ubc/edx-platform,playm2mboy/edx-platform,beacloudgenius/edx-platform,angelapper/edx-platform,cyanna/edx-platform,hkawasaki/kawasaki-aio8-2,CredoReference/edx-platform,hamzehd/edx-platform,nikolas/edx-platform,tanmaykm/edx-platform,BehavioralInsightsTeam/edx-platform,nttks/edx-platform,hastexo/edx-platform,synergeticsedx/deployment-wipro,shashank971/edx-platform,ferabra/edx-platform,UOMx/edx-platform,Livit/Livit.Learn.EdX,utecuy/edx-platform,IndonesiaX/edx-platform,utecuy/edx-platform,eemirtekin/edx-platform,Shrhawk/edx-platform,motion2015/edx-platform,jamesblunt/edx-platform,Semi-global/edx-platform,gsehub/edx-platform,naresh21/synergetics-edx-platform,ahmedaljazzar/edx-platform,doganov/edx-platform,kamalx/edx-platform,cpennington/edx-platform,Unow/edx-platform,romain-li/edx-platform,cyanna/edx-platform,alu042/edx-platform,Edraak/edraak-platform,pepeportela/edx-platform,jbzdak/edx-platform,atsolakid/edx-platform,kursitet/edx-platform,longmen21/edx-platform,deepsrijit1105/edx-platform,rue89-tech/edx-platform,jswope00/griffinx,utecuy/edx-platform,Livit/Livit.Learn.EdX,doismellburning/edx-platform,olexiim/edx-platform,tiagochiavericosta/edx-platform,LICEF/edx-platform,cecep-edu/edx-platform,rismalrv/edx-platform,jonathan-beard/edx-platform,xingyepei/edx-platform,eduNEXT/edunext-platform,chand3040/cloud_that,inares/edx-platform,sudheerchintala/LearnEraPlatForm,unicri/edx-platform,mahendra-r/edx-platform,jazztpt/edx-platform,ovnicraft/edx-platform,mjirayu/sit_academy,chudaol/edx-platform,4eek/edx-platform,bigdatauniversity/edx-platform,dsajkl/123,longmen21/edx-platform,MakeHer/edx-platform,chudaol/edx-platform,kamalx/edx-platform,WatanabeYasumasa/edx-platform,CredoReference/edx-platform,nttks/jenkins-test,y12uc231/edx-platform,antonve/s4-project-mooc,yokose-ks/edx-platform,UOMx/edx-platform,MSOpenTech/edx-platform,carsongee/edx-platform,hkawasaki/kawasaki-aio8-0,kursitet/edx-platform,LearnEra/LearnEraPlaftform,eestay/edx-platform,ahmadiga/min_edx,cognitiveclass/edx-platform,shurihell/testasia,morenopc/edx-platform,4eek/edx-platform,atsolakid/edx-platform,xinjiguaike/edx-platform,chrisndodge/edx-platform,ahmadiga/min_edx,xingyepei/edx-platform,bitifirefly/edx-platform,jruiperezv/ANALYSE,mbareta/edx-platform-ft,naresh21/synergetics-edx-platform,LearnEra/LearnEraPlaftform,motion2015/a3,lduarte1991/edx-platform,TeachAtTUM/edx-platform,gsehub/edx-platform,kmoocdev2/edx-platform,motion2015/edx-platform,B-MOOC/edx-platform,kamalx/edx-platform,Lektorium-LLC/edx-platform,alexthered/kienhoc-platform,chudaol/edx-platform,philanthropy-u/edx-platform,itsjeyd/edx-platform,UOMx/edx-platform,nikolas/edx-platform,procangroup/edx-platform,zerobatu/edx-platform,nanolearningllc/edx-platform-cypress,OmarIthawi/edx-platform,martynovp/edx-platform,IndonesiaX/edx-platform,Edraak/circleci-edx-platform,iivic/BoiseStateX,devs1991/test_edx_docmode,pabloborrego93/edx-platform,hkawasaki/kawasaki-aio8-1,hamzehd/edx-platform,Edraak/edraak-platform,LICEF/edx-platform,jswope00/griffinx,abdoosh00/edraak,xuxiao19910803/edx,jamiefolsom/edx-platform,solashirai/edx-platform,Edraak/edx-platform,gsehub/edx-platform,nagyistoce/edx-platform,franosincic/edx-platform,jswope00/griffinx,Stanford-Online/edx-platform,nikolas/edx-platform,nikolas/edx-platform,Livit/Livit.Learn.EdX,ESOedX/edx-platform,itsjeyd/edx-platform,nagyistoce/edx-platform,vismartltd/edx-platform,kmoocdev/edx-platform,nttks/jenkins-test,MakeHer/edx-platform,cecep-edu/edx-platform,motion2015/a3,fly19890211/edx-platform,jjmiranda/edx-platform,antoviaque/edx-platform,dkarakats/edx-platform,ampax/edx-platform-backup,Semi-global/edx-platform,a-parhom/edx-platform,SravanthiSinha/edx-platform,zubair-arbi/edx-platform,kxliugang/edx-platform,jbzdak/edx-platform,Softmotions/edx-platform,nttks/edx-platform,JCBarahona/edX,polimediaupv/edx-platform,BehavioralInsightsTeam/edx-platform,mitocw/edx-platform,AkA84/edx-platform,eduNEXT/edunext-platform,edx-solutions/edx-platform,chudaol/edx-platform,chrisndodge/edx-platform,hmcmooc/muddx-platform,peterm-itr/edx-platform,motion2015/a3,sudheerchintala/LearnEraPlatForm,doganov/edx-platform,Shrhawk/edx-platform,msegado/edx-platform,Ayub-Khan/edx-platform,halvertoluke/edx-platform,bdero/edx-platform,benpatterson/edx-platform,mcgachey/edx-platform,halvertoluke/edx-platform,tiagochiavericosta/edx-platform,jazkarta/edx-platform-for-isc,CourseTalk/edx-platform,jbassen/edx-platform,jazztpt/edx-platform,y12uc231/edx-platform,rismalrv/edx-platform,wwj718/edx-platform,EDUlib/edx-platform,chand3040/cloud_that,devs1991/test_edx_docmode,CourseTalk/edx-platform,Lektorium-LLC/edx-platform,longmen21/edx-platform,mushtaqak/edx-platform,dsajkl/reqiop,stvstnfrd/edx-platform,proversity-org/edx-platform,hmcmooc/muddx-platform,morenopc/edx-platform,unicri/edx-platform,DefyVentures/edx-platform,dcosentino/edx-platform,procangroup/edx-platform,caesar2164/edx-platform,don-github/edx-platform,ahmadio/edx-platform,arifsetiawan/edx-platform,ubc/edx-platform,ubc/edx-platform,dkarakats/edx-platform,halvertoluke/edx-platform,nanolearningllc/edx-platform-cypress-2,ahmadiga/min_edx,naresh21/synergetics-edx-platform,martynovp/edx-platform,mjirayu/sit_academy,CredoReference/edx-platform,olexiim/edx-platform,xuxiao19910803/edx,zhenzhai/edx-platform,jonathan-beard/edx-platform,nttks/edx-platform,nanolearning/edx-platform,alexthered/kienhoc-platform,jruiperezv/ANALYSE,wwj718/edx-platform,prarthitm/edxplatform,DefyVentures/edx-platform,prarthitm/edxplatform,mahendra-r/edx-platform,ovnicraft/edx-platform,dsajkl/123,mahendra-r/edx-platform,kxliugang/edx-platform,Shrhawk/edx-platform,teltek/edx-platform,edry/edx-platform,naresh21/synergetics-edx-platform,nanolearningllc/edx-platform-cypress-2,arifsetiawan/edx-platform,zerobatu/edx-platform,knehez/edx-platform,JioEducation/edx-platform,rhndg/openedx,kursitet/edx-platform,OmarIthawi/edx-platform,waheedahmed/edx-platform,ampax/edx-platform,Endika/edx-platform,pomegranited/edx-platform,4eek/edx-platform,hkawasaki/kawasaki-aio8-1,UXE/local-edx,rhndg/openedx,DefyVentures/edx-platform,zadgroup/edx-platform,martynovp/edx-platform,jjmiranda/edx-platform,romain-li/edx-platform,bigdatauniversity/edx-platform,zofuthan/edx-platform,deepsrijit1105/edx-platform,zadgroup/edx-platform,martynovp/edx-platform,rhndg/openedx,wwj718/edx-platform,hkawasaki/kawasaki-aio8-1,leansoft/edx-platform,edx-solutions/edx-platform,Endika/edx-platform,hastexo/edx-platform,solashirai/edx-platform,amir-qayyum-khan/edx-platform,10clouds/edx-platform,Kalyzee/edx-platform,Edraak/edx-platform,4eek/edx-platform,bitifirefly/edx-platform,valtech-mooc/edx-platform,mahendra-r/edx-platform,EDUlib/edx-platform,cognitiveclass/edx-platform,kamalx/edx-platform,etzhou/edx-platform,JCBarahona/edX,don-github/edx-platform,jamiefolsom/edx-platform,jazkarta/edx-platform-for-isc,bigdatauniversity/edx-platform,inares/edx-platform,nagyistoce/edx-platform,RPI-OPENEDX/edx-platform,benpatterson/edx-platform,morenopc/edx-platform,mbareta/edx-platform-ft,CourseTalk/edx-platform,analyseuc3m/ANALYSE-v1,chrisndodge/edx-platform,andyzsf/edx,mcgachey/edx-platform,jzoldak/edx-platform,a-parhom/edx-platform,beni55/edx-platform,LearnEra/LearnEraPlaftform,Softmotions/edx-platform,shubhdev/edxOnBaadal,etzhou/edx-platform,shubhdev/openedx,B-MOOC/edx-platform,cecep-edu/edx-platform,solashirai/edx-platform,dsajkl/123,ubc/edx-platform,gymnasium/edx-platform,amir-qayyum-khan/edx-platform,louyihua/edx-platform,eemirtekin/edx-platform,ak2703/edx-platform,gymnasium/edx-platform,zhenzhai/edx-platform,vasyarv/edx-platform,shashank971/edx-platform,raccoongang/edx-platform,halvertoluke/edx-platform,jbassen/edx-platform,UOMx/edx-platform,prarthitm/edxplatform,kxliugang/edx-platform,arbrandes/edx-platform,nanolearning/edx-platform,ESOedX/edx-platform,appsembler/edx-platform,nanolearningllc/edx-platform-cypress-2,waheedahmed/edx-platform,jelugbo/tundex,shubhdev/edxOnBaadal,solashirai/edx-platform,bigdatauniversity/edx-platform,Softmotions/edx-platform,jazkarta/edx-platform,antoviaque/edx-platform,miptliot/edx-platform,peterm-itr/edx-platform,benpatterson/edx-platform,jamesblunt/edx-platform,chauhanhardik/populo,jazkarta/edx-platform-for-isc,WatanabeYasumasa/edx-platform,msegado/edx-platform,shubhdev/edxOnBaadal,beni55/edx-platform,kursitet/edx-platform,bitifirefly/edx-platform,playm2mboy/edx-platform,marcore/edx-platform,chauhanhardik/populo,ubc/edx-platform,appsembler/edx-platform,polimediaupv/edx-platform,zofuthan/edx-platform,cselis86/edx-platform,nanolearning/edx-platform,procangroup/edx-platform,beacloudgenius/edx-platform,zubair-arbi/edx-platform,edry/edx-platform,CredoReference/edx-platform,DNFcode/edx-platform,Edraak/edraak-platform,JCBarahona/edX,J861449197/edx-platform,dsajkl/reqiop,cpennington/edx-platform,Lektorium-LLC/edx-platform,WatanabeYasumasa/edx-platform,morenopc/edx-platform,eestay/edx-platform,shubhdev/edxOnBaadal,tiagochiavericosta/edx-platform,analyseuc3m/ANALYSE-v1,shabab12/edx-platform,louyihua/edx-platform,EDUlib/edx-platform,IONISx/edx-platform,chauhanhardik/populo_2,nanolearningllc/edx-platform-cypress-2,y12uc231/edx-platform,shurihell/testasia,jazkarta/edx-platform,Stanford-Online/edx-platform,chand3040/cloud_that,AkA84/edx-platform,jjmiranda/edx-platform,zhenzhai/edx-platform,simbs/edx-platform,appliedx/edx-platform,10clouds/edx-platform,iivic/BoiseStateX,jswope00/griffinx,zerobatu/edx-platform,AkA84/edx-platform,philanthropy-u/edx-platform,B-MOOC/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,yokose-ks/edx-platform,andyzsf/edx,cyanna/edx-platform,raccoongang/edx-platform,zofuthan/edx-platform,mcgachey/edx-platform,appsembler/edx-platform,Kalyzee/edx-platform,alu042/edx-platform,IONISx/edx-platform,ferabra/edx-platform,xinjiguaike/edx-platform,philanthropy-u/edx-platform,leansoft/edx-platform,Kalyzee/edx-platform,kmoocdev/edx-platform,Semi-global/edx-platform,adoosii/edx-platform,tiagochiavericosta/edx-platform,arifsetiawan/edx-platform,Edraak/circleci-edx-platform,abdoosh00/edraak,raccoongang/edx-platform,zerobatu/edx-platform,mtlchun/edx,rismalrv/edx-platform,hkawasaki/kawasaki-aio8-0,JCBarahona/edX,simbs/edx-platform,jswope00/griffinx,unicri/edx-platform,mushtaqak/edx-platform,Shrhawk/edx-platform,kmoocdev2/edx-platform,don-github/edx-platform,jamiefolsom/edx-platform,ahmadiga/min_edx,chauhanhardik/populo_2,Softmotions/edx-platform,mushtaqak/edx-platform,fly19890211/edx-platform,jonathan-beard/edx-platform,proversity-org/edx-platform,fintech-circle/edx-platform,utecuy/edx-platform,edx/edx-platform,kmoocdev/edx-platform,chand3040/cloud_that,shabab12/edx-platform,shurihell/testasia,jazkarta/edx-platform-for-isc,Edraak/edx-platform,tiagochiavericosta/edx-platform,SravanthiSinha/edx-platform,xuxiao19910803/edx,knehez/edx-platform,cselis86/edx-platform,hkawasaki/kawasaki-aio8-2,vismartltd/edx-platform,appliedx/edx-platform,nagyistoce/edx-platform,jonathan-beard/edx-platform,jazkarta/edx-platform,Semi-global/edx-platform,doismellburning/edx-platform,iivic/BoiseStateX,zofuthan/edx-platform,adoosii/edx-platform,zerobatu/edx-platform,beacloudgenius/edx-platform,pomegranited/edx-platform,nttks/jenkins-test,longmen21/edx-platform,edx/edx-platform,Ayub-Khan/edx-platform,alexthered/kienhoc-platform,zofuthan/edx-platform,rhndg/openedx,eestay/edx-platform,nttks/jenkins-test,CourseTalk/edx-platform,jbassen/edx-platform,RPI-OPENEDX/edx-platform,zubair-arbi/edx-platform,xuxiao19910803/edx-platform,unicri/edx-platform,kursitet/edx-platform,procangroup/edx-platform,JioEducation/edx-platform,playm2mboy/edx-platform,andyzsf/edx,edry/edx-platform,jazkarta/edx-platform,devs1991/test_edx_docmode,auferack08/edx-platform,shubhdev/edx-platform,wwj718/ANALYSE,jbassen/edx-platform,mtlchun/edx,RPI-OPENEDX/edx-platform,don-github/edx-platform,ampax/edx-platform-backup,polimediaupv/edx-platform,iivic/BoiseStateX,Endika/edx-platform,yokose-ks/edx-platform,jzoldak/edx-platform,waheedahmed/edx-platform,edx-solutions/edx-platform,adoosii/edx-platform,Lektorium-LLC/edx-platform,synergeticsedx/deployment-wipro,arifsetiawan/edx-platform,dcosentino/edx-platform,IndonesiaX/edx-platform,deepsrijit1105/edx-platform,pepeportela/edx-platform,polimediaupv/edx-platform,shubhdev/openedx,wwj718/edx-platform,shubhdev/edx-platform,DNFcode/edx-platform,msegado/edx-platform,alu042/edx-platform,fintech-circle/edx-platform,deepsrijit1105/edx-platform,peterm-itr/edx-platform,jolyonb/edx-platform,atsolakid/edx-platform,Unow/edx-platform,xuxiao19910803/edx-platform,jelugbo/tundex,SivilTaram/edx-platform,alexthered/kienhoc-platform,simbs/edx-platform,nanolearningllc/edx-platform-cypress,valtech-mooc/edx-platform,yokose-ks/edx-platform,jruiperezv/ANALYSE,sameetb-cuelogic/edx-platform-test,fintech-circle/edx-platform,DefyVentures/edx-platform,edx-solutions/edx-platform,etzhou/edx-platform,analyseuc3m/ANALYSE-v1,Ayub-Khan/edx-platform,shubhdev/edx-platform,4eek/edx-platform,dcosentino/edx-platform,LearnEra/LearnEraPlaftform,xuxiao19910803/edx-platform,LICEF/edx-platform,appliedx/edx-platform,vikas1885/test1,jelugbo/tundex,TeachAtTUM/edx-platform,xingyepei/edx-platform,Endika/edx-platform,motion2015/a3,pomegranited/edx-platform,mbareta/edx-platform-ft,benpatterson/edx-platform,eduNEXT/edx-platform,B-MOOC/edx-platform,eestay/edx-platform,eemirtekin/edx-platform,romain-li/edx-platform,cecep-edu/edx-platform,waheedahmed/edx-platform,miptliot/edx-platform,IndonesiaX/edx-platform,valtech-mooc/edx-platform,jruiperezv/ANALYSE,dsajkl/123,cyanna/edx-platform,vismartltd/edx-platform,ovnicraft/edx-platform,jbassen/edx-platform,dsajkl/reqiop,shubhdev/openedx,openfun/edx-platform,jamesblunt/edx-platform,alu042/edx-platform,dkarakats/edx-platform,Edraak/edraak-platform,vikas1885/test1,vikas1885/test1,zubair-arbi/edx-platform,gymnasium/edx-platform,franosincic/edx-platform,kmoocdev/edx-platform,MSOpenTech/edx-platform,tanmaykm/edx-platform,carsongee/edx-platform,TeachAtTUM/edx-platform,Edraak/circleci-edx-platform,J861449197/edx-platform,TeachAtTUM/edx-platform,leansoft/edx-platform,stvstnfrd/edx-platform,jruiperezv/ANALYSE,mbareta/edx-platform-ft,eduNEXT/edx-platform,bitifirefly/edx-platform,simbs/edx-platform,OmarIthawi/edx-platform,Unow/edx-platform,mjirayu/sit_academy,nagyistoce/edx-platform,beni55/edx-platform,Semi-global/edx-platform,zadgroup/edx-platform,shurihell/testasia,simbs/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,jamiefolsom/edx-platform,Edraak/edx-platform,jonathan-beard/edx-platform,nttks/edx-platform,vikas1885/test1,ovnicraft/edx-platform,beni55/edx-platform,J861449197/edx-platform,rue89-tech/edx-platform,ahmadiga/min_edx,DNFcode/edx-platform,synergeticsedx/deployment-wipro,franosincic/edx-platform,ahmedaljazzar/edx-platform,ZLLab-Mooc/edx-platform,xuxiao19910803/edx-platform,inares/edx-platform,MakeHer/edx-platform,DNFcode/edx-platform,polimediaupv/edx-platform,nttks/jenkins-test,peterm-itr/edx-platform,andyzsf/edx,y12uc231/edx-platform,jolyonb/edx-platform,ampax/edx-platform,ferabra/edx-platform,mitocw/edx-platform,hastexo/edx-platform,edry/edx-platform,etzhou/edx-platform,openfun/edx-platform,shabab12/edx-platform,hkawasaki/kawasaki-aio8-0,carsongee/edx-platform,defance/edx-platform,doganov/edx-platform,cselis86/edx-platform,antoviaque/edx-platform,nanolearning/edx-platform,bdero/edx-platform,louyihua/edx-platform,kmoocdev2/edx-platform,MSOpenTech/edx-platform,Ayub-Khan/edx-platform,louyihua/edx-platform,rue89-tech/edx-platform,jazztpt/edx-platform,eemirtekin/edx-platform,JCBarahona/edX,bigdatauniversity/edx-platform,BehavioralInsightsTeam/edx-platform,ak2703/edx-platform,MakeHer/edx-platform,auferack08/edx-platform,jolyonb/edx-platform,arbrandes/edx-platform,wwj718/ANALYSE,synergeticsedx/deployment-wipro,zadgroup/edx-platform,doganov/edx-platform,hastexo/edx-platform,WatanabeYasumasa/edx-platform,Edraak/edx-platform,teltek/edx-platform,fly19890211/edx-platform,Softmotions/edx-platform,sudheerchintala/LearnEraPlatForm,lduarte1991/edx-platform,xuxiao19910803/edx-platform,shubhdev/openedx,marcore/edx-platform,nikolas/edx-platform,UXE/local-edx,ferabra/edx-platform,Kalyzee/edx-platform,jolyonb/edx-platform,philanthropy-u/edx-platform,SravanthiSinha/edx-platform,msegado/edx-platform,vismartltd/edx-platform,fly19890211/edx-platform,SivilTaram/edx-platform,SivilTaram/edx-platform,mcgachey/edx-platform,ESOedX/edx-platform,jzoldak/edx-platform,mjirayu/sit_academy,jamiefolsom/edx-platform,romain-li/edx-platform,jazkarta/edx-platform-for-isc,fintech-circle/edx-platform,caesar2164/edx-platform,shashank971/edx-platform,bdero/edx-platform,pabloborrego93/edx-platform,appliedx/edx-platform,kamalx/edx-platform,ahmadio/edx-platform,hkawasaki/kawasaki-aio8-1,shurihell/testasia,shashank971/edx-platform,xuxiao19910803/edx,sudheerchintala/LearnEraPlatForm,tanmaykm/edx-platform,ahmedaljazzar/edx-platform,cecep-edu/edx-platform,antonve/s4-project-mooc,hmcmooc/muddx-platform,nanolearning/edx-platform,MSOpenTech/edx-platform,analyseuc3m/ANALYSE-v1,antoviaque/edx-platform,a-parhom/edx-platform,ahmadio/edx-platform,AkA84/edx-platform,appliedx/edx-platform,wwj718/ANALYSE,caesar2164/edx-platform,carsongee/edx-platform,xingyepei/edx-platform,IONISx/edx-platform,y12uc231/edx-platform,Edraak/circleci-edx-platform,sameetb-cuelogic/edx-platform-test,10clouds/edx-platform,devs1991/test_edx_docmode,arbrandes/edx-platform,MakeHer/edx-platform,shubhdev/edxOnBaadal,eduNEXT/edx-platform,rhndg/openedx,jelugbo/tundex,xinjiguaike/edx-platform,jelugbo/tundex,halvertoluke/edx-platform,knehez/edx-platform,ak2703/edx-platform,playm2mboy/edx-platform,longmen21/edx-platform,eestay/edx-platform,mtlchun/edx,ZLLab-Mooc/edx-platform,chauhanhardik/populo_2,beacloudgenius/edx-platform,xinjiguaike/edx-platform,ampax/edx-platform-backup,caesar2164/edx-platform,beacloudgenius/edx-platform,MSOpenTech/edx-platform,shubhdev/edx-platform,cselis86/edx-platform,appsembler/edx-platform,chauhanhardik/populo_2,wwj718/ANALYSE,mushtaqak/edx-platform,edry/edx-platform,morenopc/edx-platform,chauhanhardik/populo,unicri/edx-platform,shabab12/edx-platform,antonve/s4-project-mooc,JioEducation/edx-platform,xuxiao19910803/edx,Edraak/circleci-edx-platform,UXE/local-edx,ferabra/edx-platform,jjmiranda/edx-platform,SravanthiSinha/edx-platform,rue89-tech/edx-platform,mjirayu/sit_academy,chauhanhardik/populo,eemirtekin/edx-platform,LICEF/edx-platform,valtech-mooc/edx-platform,antonve/s4-project-mooc,miptliot/edx-platform,AkA84/edx-platform,openfun/edx-platform,cognitiveclass/edx-platform,zadgroup/edx-platform,ampax/edx-platform-backup,iivic/BoiseStateX,ovnicraft/edx-platform,ZLLab-Mooc/edx-platform,kxliugang/edx-platform,IONISx/edx-platform,solashirai/edx-platform,hkawasaki/kawasaki-aio8-2,don-github/edx-platform,pepeportela/edx-platform,kmoocdev2/edx-platform,chauhanhardik/populo_2,waheedahmed/edx-platform,msegado/edx-platform,pomegranited/edx-platform,jazkarta/edx-platform,IndonesiaX/edx-platform,wwj718/ANALYSE,auferack08/edx-platform,proversity-org/edx-platform,cognitiveclass/edx-platform,itsjeyd/edx-platform,mtlchun/edx,chudaol/edx-platform,sameetb-cuelogic/edx-platform-test,ampax/edx-platform,vismartltd/edx-platform,Stanford-Online/edx-platform,jzoldak/edx-platform,dcosentino/edx-platform,stvstnfrd/edx-platform,Livit/Livit.Learn.EdX,pabloborrego93/edx-platform,sameetb-cuelogic/edx-platform-test,inares/edx-platform,adoosii/edx-platform,ahmadio/edx-platform,SivilTaram/edx-platform,mushtaqak/edx-platform,openfun/edx-platform,defance/edx-platform,RPI-OPENEDX/edx-platform,antonve/s4-project-mooc,sameetb-cuelogic/edx-platform-test,arifsetiawan/edx-platform,doismellburning/edx-platform,leansoft/edx-platform,arbrandes/edx-platform,nanolearningllc/edx-platform-cypress,benpatterson/edx-platform,SravanthiSinha/edx-platform,nanolearningllc/edx-platform-cypress-2,abdoosh00/edraak,martynovp/edx-platform,OmarIthawi/edx-platform,jazztpt/edx-platform,prarthitm/edxplatform,Kalyzee/edx-platform,franosincic/edx-platform,valtech-mooc/edx-platform,romain-li/edx-platform,jamesblunt/edx-platform,mitocw/edx-platform,chrisndodge/edx-platform,lduarte1991/edx-platform,rismalrv/edx-platform,mahendra-r/edx-platform,devs1991/test_edx_docmode,dkarakats/edx-platform,pomegranited/edx-platform,beni55/edx-platform,ampax/edx-platform,bdero/edx-platform,vasyarv/edx-platform,hamzehd/edx-platform,eduNEXT/edunext-platform,DNFcode/edx-platform,defance/edx-platform,J861449197/edx-platform,stvstnfrd/edx-platform,a-parhom/edx-platform,LICEF/edx-platform,ZLLab-Mooc/edx-platform,pepeportela/edx-platform,atsolakid/edx-platform,itsjeyd/edx-platform,proversity-org/edx-platform,dcosentino/edx-platform,eduNEXT/edunext-platform,pabloborrego93/edx-platform,chand3040/cloud_that
|
Add command to get discussion_link for a course
This will be used to detect courses that are linking to Stack Exchange
sites so we can handle them specially when generating data dumps.
|
from django.core.management.base import BaseCommand, CommandError
from courseware.courses import get_course
class Command(BaseCommand):
args = "<course_id>"
def handle(self, *args, **options):
if not args:
raise CommandError("Course id not specified")
if len(args) > 1:
raise CommandError("Only one course id may be specifiied")
course_id = args[0]
try:
course = get_course(course_id)
except ValueError:
raise CommandError("Invalid course id: {}".format(course_id))
if course.discussion_link:
self.stdout.write(course.discussion_link)
|
<commit_before><commit_msg>Add command to get discussion_link for a course
This will be used to detect courses that are linking to Stack Exchange
sites so we can handle them specially when generating data dumps.<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from courseware.courses import get_course
class Command(BaseCommand):
args = "<course_id>"
def handle(self, *args, **options):
if not args:
raise CommandError("Course id not specified")
if len(args) > 1:
raise CommandError("Only one course id may be specifiied")
course_id = args[0]
try:
course = get_course(course_id)
except ValueError:
raise CommandError("Invalid course id: {}".format(course_id))
if course.discussion_link:
self.stdout.write(course.discussion_link)
|
Add command to get discussion_link for a course
This will be used to detect courses that are linking to Stack Exchange
sites so we can handle them specially when generating data dumps.from django.core.management.base import BaseCommand, CommandError
from courseware.courses import get_course
class Command(BaseCommand):
args = "<course_id>"
def handle(self, *args, **options):
if not args:
raise CommandError("Course id not specified")
if len(args) > 1:
raise CommandError("Only one course id may be specifiied")
course_id = args[0]
try:
course = get_course(course_id)
except ValueError:
raise CommandError("Invalid course id: {}".format(course_id))
if course.discussion_link:
self.stdout.write(course.discussion_link)
|
<commit_before><commit_msg>Add command to get discussion_link for a course
This will be used to detect courses that are linking to Stack Exchange
sites so we can handle them specially when generating data dumps.<commit_after>from django.core.management.base import BaseCommand, CommandError
from courseware.courses import get_course
class Command(BaseCommand):
args = "<course_id>"
def handle(self, *args, **options):
if not args:
raise CommandError("Course id not specified")
if len(args) > 1:
raise CommandError("Only one course id may be specifiied")
course_id = args[0]
try:
course = get_course(course_id)
except ValueError:
raise CommandError("Invalid course id: {}".format(course_id))
if course.discussion_link:
self.stdout.write(course.discussion_link)
|
|
2420373ccdf99bc0019c7c9a09b3b3b8b6952c1b
|
utils/summary_downloader.py
|
utils/summary_downloader.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class SummaryDownloader():
# base url for official schedule json page
SCHEDULE_URL_BASE = "http://statsapi.web.nhl.com/api/v1/schedule"
# url template for official json gamefeed page
JSON_GAME_FEED_URL_TEMPLATE = (
"http://statsapi.web.nhl.com/api/v1/game/%s/feed/live")
|
Add stub for summary downloader
|
Add stub for summary downloader
|
Python
|
mit
|
leaffan/pynhldb
|
Add stub for summary downloader
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class SummaryDownloader():
# base url for official schedule json page
SCHEDULE_URL_BASE = "http://statsapi.web.nhl.com/api/v1/schedule"
# url template for official json gamefeed page
JSON_GAME_FEED_URL_TEMPLATE = (
"http://statsapi.web.nhl.com/api/v1/game/%s/feed/live")
|
<commit_before><commit_msg>Add stub for summary downloader<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class SummaryDownloader():
# base url for official schedule json page
SCHEDULE_URL_BASE = "http://statsapi.web.nhl.com/api/v1/schedule"
# url template for official json gamefeed page
JSON_GAME_FEED_URL_TEMPLATE = (
"http://statsapi.web.nhl.com/api/v1/game/%s/feed/live")
|
Add stub for summary downloader#!/usr/bin/env python
# -*- coding: utf-8 -*-
class SummaryDownloader():
# base url for official schedule json page
SCHEDULE_URL_BASE = "http://statsapi.web.nhl.com/api/v1/schedule"
# url template for official json gamefeed page
JSON_GAME_FEED_URL_TEMPLATE = (
"http://statsapi.web.nhl.com/api/v1/game/%s/feed/live")
|
<commit_before><commit_msg>Add stub for summary downloader<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
class SummaryDownloader():
# base url for official schedule json page
SCHEDULE_URL_BASE = "http://statsapi.web.nhl.com/api/v1/schedule"
# url template for official json gamefeed page
JSON_GAME_FEED_URL_TEMPLATE = (
"http://statsapi.web.nhl.com/api/v1/game/%s/feed/live")
|
|
78fefe27f9c5e77a3910a100637332a249a9d90f
|
IrrduinoServer/irrduinoserver/handlers/reportshandler.py
|
IrrduinoServer/irrduinoserver/handlers/reportshandler.py
|
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle reporting."""
from google.appengine.ext import webapp
from irrduinoserver import model
from irrduinoserver.utils import web as webutils
from irrduinoserver.utils import irrduino as irrduinoutils
class ReportsHandler(webapp.RequestHandler):
def get(self):
template_params = {}
template_params["zone_runs"] = list(model.ZoneRun.gql(""))
webutils.render_to_response(self, "reports.html", template_params)
def post(self):
"""Accept data from Irrduino.
Store it in the datastore and just respond "OK".
"""
try:
zone = int(self.request.get("zone"))
if (zone < irrduinoutils.MIN_ZONE or
zone > irrduinoutils.MAX_ZONE):
raise ValueError("zone out of range: %s" % zone)
runtime = int(self.request.get("runtime"))
if runtime <= 0:
raise ValueError("runtime out of range: %s" % runtime)
except (ValueError, TypeError), e:
webutils.error_response(msg="Invalid request: %r" % e)
else:
zone_run = model.ZoneRun(zone=zone, runtime_seconds=runtime)
zone_run.put()
self.response.out.write("OK")
|
Store info about zone runs
|
Store info about zone runs
|
Python
|
apache-2.0
|
jesuscorral/irrduino,rosope/irrduino,jesuscorral/irrduino,jecrespo/jecrespom-irrduino,angelmorinigo/irrduino,nlrivera/nickrivera12-test,Thibos/irrduino,angelmorinigo/irrduino,google/irrduino,jesuscorral/irrduino,jecrespo/jecrespom-irrduino,jesuscorral/irrduino,nlrivera/nickrivera12-test,Thibos/irrduino,nlrivera/nickrivera12-test,angelmorinigo/irrduino,jecrespo/jecrespom-irrduino,nlrivera/nickrivera12-test,jesuscorral/irrduino,rosope/irrduino,angelmorinigo/irrduino,rosope/irrduino,jecrespo/jecrespom-irrduino,google/irrduino,rosope/irrduino,jecrespo/jecrespom-irrduino,nlrivera/nickrivera12-test,Thibos/irrduino,Thibos/irrduino,google/irrduino,google/irrduino,Thibos/irrduino,angelmorinigo/irrduino,rosope/irrduino,google/irrduino
|
Store info about zone runs
|
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle reporting."""
from google.appengine.ext import webapp
from irrduinoserver import model
from irrduinoserver.utils import web as webutils
from irrduinoserver.utils import irrduino as irrduinoutils
class ReportsHandler(webapp.RequestHandler):
def get(self):
template_params = {}
template_params["zone_runs"] = list(model.ZoneRun.gql(""))
webutils.render_to_response(self, "reports.html", template_params)
def post(self):
"""Accept data from Irrduino.
Store it in the datastore and just respond "OK".
"""
try:
zone = int(self.request.get("zone"))
if (zone < irrduinoutils.MIN_ZONE or
zone > irrduinoutils.MAX_ZONE):
raise ValueError("zone out of range: %s" % zone)
runtime = int(self.request.get("runtime"))
if runtime <= 0:
raise ValueError("runtime out of range: %s" % runtime)
except (ValueError, TypeError), e:
webutils.error_response(msg="Invalid request: %r" % e)
else:
zone_run = model.ZoneRun(zone=zone, runtime_seconds=runtime)
zone_run.put()
self.response.out.write("OK")
|
<commit_before><commit_msg>Store info about zone runs<commit_after>
|
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle reporting."""
from google.appengine.ext import webapp
from irrduinoserver import model
from irrduinoserver.utils import web as webutils
from irrduinoserver.utils import irrduino as irrduinoutils
class ReportsHandler(webapp.RequestHandler):
def get(self):
template_params = {}
template_params["zone_runs"] = list(model.ZoneRun.gql(""))
webutils.render_to_response(self, "reports.html", template_params)
def post(self):
"""Accept data from Irrduino.
Store it in the datastore and just respond "OK".
"""
try:
zone = int(self.request.get("zone"))
if (zone < irrduinoutils.MIN_ZONE or
zone > irrduinoutils.MAX_ZONE):
raise ValueError("zone out of range: %s" % zone)
runtime = int(self.request.get("runtime"))
if runtime <= 0:
raise ValueError("runtime out of range: %s" % runtime)
except (ValueError, TypeError), e:
webutils.error_response(msg="Invalid request: %r" % e)
else:
zone_run = model.ZoneRun(zone=zone, runtime_seconds=runtime)
zone_run.put()
self.response.out.write("OK")
|
Store info about zone runs# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle reporting."""
from google.appengine.ext import webapp
from irrduinoserver import model
from irrduinoserver.utils import web as webutils
from irrduinoserver.utils import irrduino as irrduinoutils
class ReportsHandler(webapp.RequestHandler):
def get(self):
template_params = {}
template_params["zone_runs"] = list(model.ZoneRun.gql(""))
webutils.render_to_response(self, "reports.html", template_params)
def post(self):
"""Accept data from Irrduino.
Store it in the datastore and just respond "OK".
"""
try:
zone = int(self.request.get("zone"))
if (zone < irrduinoutils.MIN_ZONE or
zone > irrduinoutils.MAX_ZONE):
raise ValueError("zone out of range: %s" % zone)
runtime = int(self.request.get("runtime"))
if runtime <= 0:
raise ValueError("runtime out of range: %s" % runtime)
except (ValueError, TypeError), e:
webutils.error_response(msg="Invalid request: %r" % e)
else:
zone_run = model.ZoneRun(zone=zone, runtime_seconds=runtime)
zone_run.put()
self.response.out.write("OK")
|
<commit_before><commit_msg>Store info about zone runs<commit_after># Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle reporting."""
from google.appengine.ext import webapp
from irrduinoserver import model
from irrduinoserver.utils import web as webutils
from irrduinoserver.utils import irrduino as irrduinoutils
class ReportsHandler(webapp.RequestHandler):
def get(self):
template_params = {}
template_params["zone_runs"] = list(model.ZoneRun.gql(""))
webutils.render_to_response(self, "reports.html", template_params)
def post(self):
"""Accept data from Irrduino.
Store it in the datastore and just respond "OK".
"""
try:
zone = int(self.request.get("zone"))
if (zone < irrduinoutils.MIN_ZONE or
zone > irrduinoutils.MAX_ZONE):
raise ValueError("zone out of range: %s" % zone)
runtime = int(self.request.get("runtime"))
if runtime <= 0:
raise ValueError("runtime out of range: %s" % runtime)
except (ValueError, TypeError), e:
webutils.error_response(msg="Invalid request: %r" % e)
else:
zone_run = model.ZoneRun(zone=zone, runtime_seconds=runtime)
zone_run.put()
self.response.out.write("OK")
|
|
1299fde67d5ea3d210eff9e164db7d4c02b40099
|
alembic/versions/430dc4ed6dd6_add_shopping_categories.py
|
alembic/versions/430dc4ed6dd6_add_shopping_categories.py
|
"""Add shopping categories
Revision ID: 430dc4ed6dd6
Revises: 3a1dddd0c0f8
Create Date: 2014-12-24 09:55:10.993854
"""
# revision identifiers, used by Alembic.
revision = '430dc4ed6dd6'
down_revision = '3a1dddd0c0f8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
def upgrade():
shopping_categories = table(
'shopping_category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Unicode(75), nullable=False),
sa.Column('daily_limit', sa.Integer, nullable=True)
)
op.bulk_insert(
shopping_categories,
[
{'id': 1, 'name': 'Clothing',
'daily_limit': 5},
{'id': 2, 'name': 'Household',
'daily_limit': 5},
{'id': 3, 'name': 'Shoes',
'daily_limit': 5},
{'id': 4, 'name': 'Baby',
'daily_limit': 5},
{'id': 5, 'name': 'Coats',
'daily_limit': 5},
{'id': 6, 'name': 'Other',
'daily_limit': 5}
])
def downgrade():
pass
|
Add shopping categories to the database
|
Add shopping categories to the database
|
Python
|
mit
|
jlutz777/FreeStore,jlutz777/FreeStore,jlutz777/FreeStore
|
Add shopping categories to the database
|
"""Add shopping categories
Revision ID: 430dc4ed6dd6
Revises: 3a1dddd0c0f8
Create Date: 2014-12-24 09:55:10.993854
"""
# revision identifiers, used by Alembic.
revision = '430dc4ed6dd6'
down_revision = '3a1dddd0c0f8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
def upgrade():
shopping_categories = table(
'shopping_category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Unicode(75), nullable=False),
sa.Column('daily_limit', sa.Integer, nullable=True)
)
op.bulk_insert(
shopping_categories,
[
{'id': 1, 'name': 'Clothing',
'daily_limit': 5},
{'id': 2, 'name': 'Household',
'daily_limit': 5},
{'id': 3, 'name': 'Shoes',
'daily_limit': 5},
{'id': 4, 'name': 'Baby',
'daily_limit': 5},
{'id': 5, 'name': 'Coats',
'daily_limit': 5},
{'id': 6, 'name': 'Other',
'daily_limit': 5}
])
def downgrade():
pass
|
<commit_before><commit_msg>Add shopping categories to the database<commit_after>
|
"""Add shopping categories
Revision ID: 430dc4ed6dd6
Revises: 3a1dddd0c0f8
Create Date: 2014-12-24 09:55:10.993854
"""
# revision identifiers, used by Alembic.
revision = '430dc4ed6dd6'
down_revision = '3a1dddd0c0f8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
def upgrade():
shopping_categories = table(
'shopping_category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Unicode(75), nullable=False),
sa.Column('daily_limit', sa.Integer, nullable=True)
)
op.bulk_insert(
shopping_categories,
[
{'id': 1, 'name': 'Clothing',
'daily_limit': 5},
{'id': 2, 'name': 'Household',
'daily_limit': 5},
{'id': 3, 'name': 'Shoes',
'daily_limit': 5},
{'id': 4, 'name': 'Baby',
'daily_limit': 5},
{'id': 5, 'name': 'Coats',
'daily_limit': 5},
{'id': 6, 'name': 'Other',
'daily_limit': 5}
])
def downgrade():
pass
|
Add shopping categories to the database"""Add shopping categories
Revision ID: 430dc4ed6dd6
Revises: 3a1dddd0c0f8
Create Date: 2014-12-24 09:55:10.993854
"""
# revision identifiers, used by Alembic.
revision = '430dc4ed6dd6'
down_revision = '3a1dddd0c0f8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
def upgrade():
shopping_categories = table(
'shopping_category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Unicode(75), nullable=False),
sa.Column('daily_limit', sa.Integer, nullable=True)
)
op.bulk_insert(
shopping_categories,
[
{'id': 1, 'name': 'Clothing',
'daily_limit': 5},
{'id': 2, 'name': 'Household',
'daily_limit': 5},
{'id': 3, 'name': 'Shoes',
'daily_limit': 5},
{'id': 4, 'name': 'Baby',
'daily_limit': 5},
{'id': 5, 'name': 'Coats',
'daily_limit': 5},
{'id': 6, 'name': 'Other',
'daily_limit': 5}
])
def downgrade():
pass
|
<commit_before><commit_msg>Add shopping categories to the database<commit_after>"""Add shopping categories
Revision ID: 430dc4ed6dd6
Revises: 3a1dddd0c0f8
Create Date: 2014-12-24 09:55:10.993854
"""
# revision identifiers, used by Alembic.
revision = '430dc4ed6dd6'
down_revision = '3a1dddd0c0f8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
def upgrade():
shopping_categories = table(
'shopping_category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Unicode(75), nullable=False),
sa.Column('daily_limit', sa.Integer, nullable=True)
)
op.bulk_insert(
shopping_categories,
[
{'id': 1, 'name': 'Clothing',
'daily_limit': 5},
{'id': 2, 'name': 'Household',
'daily_limit': 5},
{'id': 3, 'name': 'Shoes',
'daily_limit': 5},
{'id': 4, 'name': 'Baby',
'daily_limit': 5},
{'id': 5, 'name': 'Coats',
'daily_limit': 5},
{'id': 6, 'name': 'Other',
'daily_limit': 5}
])
def downgrade():
pass
|
|
1bb5bb76489acd65d61415b9ce82bbb35dc53de3
|
src/ggrc/migrations/versions/20150520103539_b0c3361797a_migrate_control_sections_to_.py
|
src/ggrc/migrations/versions/20150520103539_b0c3361797a_migrate_control_sections_to_.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate control_sections to relationships
Revision ID: b0c3361797a
Revises: 32e064034091
Create Date: 2015-05-20 10:35:39.561584
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'b0c3361797a'
down_revision = '32e064034091'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT modified_by_id, created_at, updated_at, control_id as source_id,
'Control' as source_type, section_id as destination_id,
'Section' as destination_type, context_id
FROM control_sections;
"""
op.execute(sql)
op.drop_constraint(
'control_sections_ibfk_1', 'control_sections', type_='foreignkey')
op.drop_constraint(
'control_sections_ibfk_2', 'control_sections', type_='foreignkey')
def downgrade():
op.create_foreign_key(
'control_sections_ibfk_1',
'control_sections',
'controls',
['control_id'],
['id']
)
op.create_foreign_key(
'control_controls_ibfk_2',
'control_sections',
'sections',
['section_id'],
['id']
)
|
Add control_sections to relationships migration
|
Add control_sections to relationships migration
|
Python
|
apache-2.0
|
andrei-karalionak/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,kr41/ggrc-core,hyperNURb/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core
|
Add control_sections to relationships migration
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate control_sections to relationships
Revision ID: b0c3361797a
Revises: 32e064034091
Create Date: 2015-05-20 10:35:39.561584
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'b0c3361797a'
down_revision = '32e064034091'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT modified_by_id, created_at, updated_at, control_id as source_id,
'Control' as source_type, section_id as destination_id,
'Section' as destination_type, context_id
FROM control_sections;
"""
op.execute(sql)
op.drop_constraint(
'control_sections_ibfk_1', 'control_sections', type_='foreignkey')
op.drop_constraint(
'control_sections_ibfk_2', 'control_sections', type_='foreignkey')
def downgrade():
op.create_foreign_key(
'control_sections_ibfk_1',
'control_sections',
'controls',
['control_id'],
['id']
)
op.create_foreign_key(
'control_controls_ibfk_2',
'control_sections',
'sections',
['section_id'],
['id']
)
|
<commit_before><commit_msg>Add control_sections to relationships migration<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate control_sections to relationships
Revision ID: b0c3361797a
Revises: 32e064034091
Create Date: 2015-05-20 10:35:39.561584
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'b0c3361797a'
down_revision = '32e064034091'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT modified_by_id, created_at, updated_at, control_id as source_id,
'Control' as source_type, section_id as destination_id,
'Section' as destination_type, context_id
FROM control_sections;
"""
op.execute(sql)
op.drop_constraint(
'control_sections_ibfk_1', 'control_sections', type_='foreignkey')
op.drop_constraint(
'control_sections_ibfk_2', 'control_sections', type_='foreignkey')
def downgrade():
op.create_foreign_key(
'control_sections_ibfk_1',
'control_sections',
'controls',
['control_id'],
['id']
)
op.create_foreign_key(
'control_controls_ibfk_2',
'control_sections',
'sections',
['section_id'],
['id']
)
|
Add control_sections to relationships migration# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate control_sections to relationships
Revision ID: b0c3361797a
Revises: 32e064034091
Create Date: 2015-05-20 10:35:39.561584
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'b0c3361797a'
down_revision = '32e064034091'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT modified_by_id, created_at, updated_at, control_id as source_id,
'Control' as source_type, section_id as destination_id,
'Section' as destination_type, context_id
FROM control_sections;
"""
op.execute(sql)
op.drop_constraint(
'control_sections_ibfk_1', 'control_sections', type_='foreignkey')
op.drop_constraint(
'control_sections_ibfk_2', 'control_sections', type_='foreignkey')
def downgrade():
op.create_foreign_key(
'control_sections_ibfk_1',
'control_sections',
'controls',
['control_id'],
['id']
)
op.create_foreign_key(
'control_controls_ibfk_2',
'control_sections',
'sections',
['section_id'],
['id']
)
|
<commit_before><commit_msg>Add control_sections to relationships migration<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate control_sections to relationships
Revision ID: b0c3361797a
Revises: 32e064034091
Create Date: 2015-05-20 10:35:39.561584
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'b0c3361797a'
down_revision = '32e064034091'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT modified_by_id, created_at, updated_at, control_id as source_id,
'Control' as source_type, section_id as destination_id,
'Section' as destination_type, context_id
FROM control_sections;
"""
op.execute(sql)
op.drop_constraint(
'control_sections_ibfk_1', 'control_sections', type_='foreignkey')
op.drop_constraint(
'control_sections_ibfk_2', 'control_sections', type_='foreignkey')
def downgrade():
op.create_foreign_key(
'control_sections_ibfk_1',
'control_sections',
'controls',
['control_id'],
['id']
)
op.create_foreign_key(
'control_controls_ibfk_2',
'control_sections',
'sections',
['section_id'],
['id']
)
|
|
d3dbfdcdcab05aca5035b8dbeb1c73c2885ac9cd
|
DateThumbnails.py
|
DateThumbnails.py
|
'''Make a jpg images with thumbnails and date information for all files in directory
To-Do: Currently it will always make 5 * 3 thumbnails - need to calc this number
use argparse
add docs
add setup.py
'''
import os
from glob import glob
import exifread
import PIL
import matplotlib.pyplot as plt
try:
os.remove('thumb.jpg')
except OSError:
pass
filelist = glob('*')
n_pictures = len(filelist)
fig = plt.figure(figsize=(6,8))
for i,f in enumerate(filelist):
with open(f, 'rb') as imf:
tags = exifread.process_file(imf)
date = tags['EXIF DateTimeOriginal'].values
im = PIL.Image.open(imf)
im.thumbnail((128, 128))
im = np.array(im)
ax = fig.add_subplot(5,3,i+1)
ax.imshow(im)
ax.set_frame_on(False)
ax.axes.get_yaxis().set_visible(False)
ax.axes.get_xaxis().set_visible(False)
ax.set_title(date.split()[0])
fig.savefig('thumb.jpg')
|
Add script to generate thumbnaisl with data info fomr eXIF metadata
|
Add script to generate thumbnaisl with data info fomr eXIF metadata
|
Python
|
mit
|
hamogu/Photos
|
Add script to generate thumbnaisl with data info fomr eXIF metadata
|
'''Make a jpg images with thumbnails and date information for all files in directory
To-Do: Currently it will always make 5 * 3 thumbnails - need to calc this number
use argparse
add docs
add setup.py
'''
import os
from glob import glob
import exifread
import PIL
import matplotlib.pyplot as plt
try:
os.remove('thumb.jpg')
except OSError:
pass
filelist = glob('*')
n_pictures = len(filelist)
fig = plt.figure(figsize=(6,8))
for i,f in enumerate(filelist):
with open(f, 'rb') as imf:
tags = exifread.process_file(imf)
date = tags['EXIF DateTimeOriginal'].values
im = PIL.Image.open(imf)
im.thumbnail((128, 128))
im = np.array(im)
ax = fig.add_subplot(5,3,i+1)
ax.imshow(im)
ax.set_frame_on(False)
ax.axes.get_yaxis().set_visible(False)
ax.axes.get_xaxis().set_visible(False)
ax.set_title(date.split()[0])
fig.savefig('thumb.jpg')
|
<commit_before><commit_msg>Add script to generate thumbnaisl with data info fomr eXIF metadata<commit_after>
|
'''Make a jpg images with thumbnails and date information for all files in directory
To-Do: Currently it will always make 5 * 3 thumbnails - need to calc this number
use argparse
add docs
add setup.py
'''
import os
from glob import glob
import exifread
import PIL
import matplotlib.pyplot as plt
try:
os.remove('thumb.jpg')
except OSError:
pass
filelist = glob('*')
n_pictures = len(filelist)
fig = plt.figure(figsize=(6,8))
for i,f in enumerate(filelist):
with open(f, 'rb') as imf:
tags = exifread.process_file(imf)
date = tags['EXIF DateTimeOriginal'].values
im = PIL.Image.open(imf)
im.thumbnail((128, 128))
im = np.array(im)
ax = fig.add_subplot(5,3,i+1)
ax.imshow(im)
ax.set_frame_on(False)
ax.axes.get_yaxis().set_visible(False)
ax.axes.get_xaxis().set_visible(False)
ax.set_title(date.split()[0])
fig.savefig('thumb.jpg')
|
Add script to generate thumbnaisl with data info fomr eXIF metadata'''Make a jpg images with thumbnails and date information for all files in directory
To-Do: Currently it will always make 5 * 3 thumbnails - need to calc this number
use argparse
add docs
add setup.py
'''
import os
from glob import glob
import exifread
import PIL
import matplotlib.pyplot as plt
try:
os.remove('thumb.jpg')
except OSError:
pass
filelist = glob('*')
n_pictures = len(filelist)
fig = plt.figure(figsize=(6,8))
for i,f in enumerate(filelist):
with open(f, 'rb') as imf:
tags = exifread.process_file(imf)
date = tags['EXIF DateTimeOriginal'].values
im = PIL.Image.open(imf)
im.thumbnail((128, 128))
im = np.array(im)
ax = fig.add_subplot(5,3,i+1)
ax.imshow(im)
ax.set_frame_on(False)
ax.axes.get_yaxis().set_visible(False)
ax.axes.get_xaxis().set_visible(False)
ax.set_title(date.split()[0])
fig.savefig('thumb.jpg')
|
<commit_before><commit_msg>Add script to generate thumbnaisl with data info fomr eXIF metadata<commit_after>'''Make a jpg images with thumbnails and date information for all files in directory
To-Do: Currently it will always make 5 * 3 thumbnails - need to calc this number
use argparse
add docs
add setup.py
'''
import os
from glob import glob
import exifread
import PIL
import matplotlib.pyplot as plt
try:
os.remove('thumb.jpg')
except OSError:
pass
filelist = glob('*')
n_pictures = len(filelist)
fig = plt.figure(figsize=(6,8))
for i,f in enumerate(filelist):
with open(f, 'rb') as imf:
tags = exifread.process_file(imf)
date = tags['EXIF DateTimeOriginal'].values
im = PIL.Image.open(imf)
im.thumbnail((128, 128))
im = np.array(im)
ax = fig.add_subplot(5,3,i+1)
ax.imshow(im)
ax.set_frame_on(False)
ax.axes.get_yaxis().set_visible(False)
ax.axes.get_xaxis().set_visible(False)
ax.set_title(date.split()[0])
fig.savefig('thumb.jpg')
|
|
14cb4e5ff560f1122944323ce4fefd66c85f26e6
|
Python/59_SpiralMatrixII.py
|
Python/59_SpiralMatrixII.py
|
class Solution(object):
def generateMatrix(self, n):
"""
:type matrix: List[List[int]]
:rtype: List[int]
"""
if n <= 0:
return []
if n == 1:
return [[1]]
#initialize matrix
matrix = [[0 for x in range(n)] for x in range(n)]
direction ={'LEFT':0,'RIGHT':1, 'DOWN':2, 'UP':3}
margin = {'left':0,'right':n-1, 'top':0, 'bottom':n-1}
mSize = n*n
d = direction['RIGHT'] #direction
curRow = 0 #row cursor
curCol = 0 #col cursor
i=1
while mSize > 0:
matrix[curRow][curCol] = i
i += 1
mSize -=1
if d == direction['RIGHT']:
if curCol == margin['right']:
d = direction['DOWN']
margin['top'] +=1
curRow += 1
continue
curCol +=1
continue
if d == direction['LEFT']:
if curCol == margin['left']:
d = direction['UP']
margin['bottom'] -= 1
curRow -=1
continue
curCol -=1
continue
if d == direction['DOWN']:
if curRow == margin['bottom']:
d = direction['LEFT']
margin['right'] -= 1
curCol -= 1
continue
curRow +=1
continue
if d == direction['UP']:
if curRow == margin['top']:
d = direction['RIGHT']
margin['left'] += 1
curCol += 1
continue
curRow -=1
continue
return matrix
n = 4
#print matrix
foo = Solution()
print foo.spiralOrder(n)
|
Add solution for 59 Spiral Matrix II
|
Add solution for 59 Spiral Matrix II
|
Python
|
mit
|
comicxmz001/LeetCode,comicxmz001/LeetCode
|
Add solution for 59 Spiral Matrix II
|
class Solution(object):
def generateMatrix(self, n):
"""
:type matrix: List[List[int]]
:rtype: List[int]
"""
if n <= 0:
return []
if n == 1:
return [[1]]
#initialize matrix
matrix = [[0 for x in range(n)] for x in range(n)]
direction ={'LEFT':0,'RIGHT':1, 'DOWN':2, 'UP':3}
margin = {'left':0,'right':n-1, 'top':0, 'bottom':n-1}
mSize = n*n
d = direction['RIGHT'] #direction
curRow = 0 #row cursor
curCol = 0 #col cursor
i=1
while mSize > 0:
matrix[curRow][curCol] = i
i += 1
mSize -=1
if d == direction['RIGHT']:
if curCol == margin['right']:
d = direction['DOWN']
margin['top'] +=1
curRow += 1
continue
curCol +=1
continue
if d == direction['LEFT']:
if curCol == margin['left']:
d = direction['UP']
margin['bottom'] -= 1
curRow -=1
continue
curCol -=1
continue
if d == direction['DOWN']:
if curRow == margin['bottom']:
d = direction['LEFT']
margin['right'] -= 1
curCol -= 1
continue
curRow +=1
continue
if d == direction['UP']:
if curRow == margin['top']:
d = direction['RIGHT']
margin['left'] += 1
curCol += 1
continue
curRow -=1
continue
return matrix
n = 4
#print matrix
foo = Solution()
print foo.spiralOrder(n)
|
<commit_before><commit_msg>Add solution for 59 Spiral Matrix II<commit_after>
|
class Solution(object):
def generateMatrix(self, n):
"""
:type matrix: List[List[int]]
:rtype: List[int]
"""
if n <= 0:
return []
if n == 1:
return [[1]]
#initialize matrix
matrix = [[0 for x in range(n)] for x in range(n)]
direction ={'LEFT':0,'RIGHT':1, 'DOWN':2, 'UP':3}
margin = {'left':0,'right':n-1, 'top':0, 'bottom':n-1}
mSize = n*n
d = direction['RIGHT'] #direction
curRow = 0 #row cursor
curCol = 0 #col cursor
i=1
while mSize > 0:
matrix[curRow][curCol] = i
i += 1
mSize -=1
if d == direction['RIGHT']:
if curCol == margin['right']:
d = direction['DOWN']
margin['top'] +=1
curRow += 1
continue
curCol +=1
continue
if d == direction['LEFT']:
if curCol == margin['left']:
d = direction['UP']
margin['bottom'] -= 1
curRow -=1
continue
curCol -=1
continue
if d == direction['DOWN']:
if curRow == margin['bottom']:
d = direction['LEFT']
margin['right'] -= 1
curCol -= 1
continue
curRow +=1
continue
if d == direction['UP']:
if curRow == margin['top']:
d = direction['RIGHT']
margin['left'] += 1
curCol += 1
continue
curRow -=1
continue
return matrix
n = 4
#print matrix
foo = Solution()
print foo.spiralOrder(n)
|
Add solution for 59 Spiral Matrix IIclass Solution(object):
def generateMatrix(self, n):
"""
:type matrix: List[List[int]]
:rtype: List[int]
"""
if n <= 0:
return []
if n == 1:
return [[1]]
#initialize matrix
matrix = [[0 for x in range(n)] for x in range(n)]
direction ={'LEFT':0,'RIGHT':1, 'DOWN':2, 'UP':3}
margin = {'left':0,'right':n-1, 'top':0, 'bottom':n-1}
mSize = n*n
d = direction['RIGHT'] #direction
curRow = 0 #row cursor
curCol = 0 #col cursor
i=1
while mSize > 0:
matrix[curRow][curCol] = i
i += 1
mSize -=1
if d == direction['RIGHT']:
if curCol == margin['right']:
d = direction['DOWN']
margin['top'] +=1
curRow += 1
continue
curCol +=1
continue
if d == direction['LEFT']:
if curCol == margin['left']:
d = direction['UP']
margin['bottom'] -= 1
curRow -=1
continue
curCol -=1
continue
if d == direction['DOWN']:
if curRow == margin['bottom']:
d = direction['LEFT']
margin['right'] -= 1
curCol -= 1
continue
curRow +=1
continue
if d == direction['UP']:
if curRow == margin['top']:
d = direction['RIGHT']
margin['left'] += 1
curCol += 1
continue
curRow -=1
continue
return matrix
n = 4
#print matrix
foo = Solution()
print foo.spiralOrder(n)
|
<commit_before><commit_msg>Add solution for 59 Spiral Matrix II<commit_after>class Solution(object):
def generateMatrix(self, n):
"""
:type matrix: List[List[int]]
:rtype: List[int]
"""
if n <= 0:
return []
if n == 1:
return [[1]]
#initialize matrix
matrix = [[0 for x in range(n)] for x in range(n)]
direction ={'LEFT':0,'RIGHT':1, 'DOWN':2, 'UP':3}
margin = {'left':0,'right':n-1, 'top':0, 'bottom':n-1}
mSize = n*n
d = direction['RIGHT'] #direction
curRow = 0 #row cursor
curCol = 0 #col cursor
i=1
while mSize > 0:
matrix[curRow][curCol] = i
i += 1
mSize -=1
if d == direction['RIGHT']:
if curCol == margin['right']:
d = direction['DOWN']
margin['top'] +=1
curRow += 1
continue
curCol +=1
continue
if d == direction['LEFT']:
if curCol == margin['left']:
d = direction['UP']
margin['bottom'] -= 1
curRow -=1
continue
curCol -=1
continue
if d == direction['DOWN']:
if curRow == margin['bottom']:
d = direction['LEFT']
margin['right'] -= 1
curCol -= 1
continue
curRow +=1
continue
if d == direction['UP']:
if curRow == margin['top']:
d = direction['RIGHT']
margin['left'] += 1
curCol += 1
continue
curRow -=1
continue
return matrix
n = 4
#print matrix
foo = Solution()
print foo.spiralOrder(n)
|
|
475c968a16809a386a1249de396d0574623b682d
|
debates2txt.py
|
debates2txt.py
|
#!/usr/bin/env python
# -.*- coding: utf-8 -.*-
"""Script to save text in xml file(s) to text file(s).
Usage: debates2txt.py <xml-file or directory containing xml files>
2014-12-15 j.vanderzwaan@esciencecenter.nl
"""
import argparse
import xml.etree.ElementTree as ET
import re
import os
import codecs
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('xml', help='the name of the xml file containing the '
'the word field counts should be extracted for')
args = parser.parse_args()
# file or directory?
if os.path.isfile(args.xml):
files = [args.xml]
else:
files = []
for fn in os.listdir(args.xml):
file_name = '{}{}{}'.format(args.xml, os.sep, fn)
if os.path.isfile(file_name):
files.append(file_name)
for input_file in files:
# read xml file
tree = ET.parse(input_file)
root = tree.getroot()
lines = []
for speech in tree.getiterator('speech'):
speaker = speech.attrib.get('speaker')
text = ET.tostring(speech)
# remove xml tags
text = re.sub('<[^>]*>', '', text)
# remove html entities (e.g., ɣ)
text = re.sub('&#\d+;', '', text)
lines.append(text)
_head, tail = os.path.split(input_file)
print tail
out_file = '{}.txt'.format(tail.split('.')[0])
with codecs.open(out_file, 'wb', 'utf-8') as f:
f.write('\n'.join(lines))
|
Add script to generate text file from the debates data (xml)
|
Add script to generate text file from the debates data (xml)
Upon request by Inger Leemans. Adjusted the debates2csv script to save
all text in the xml file(s) to txt file(s).
|
Python
|
apache-2.0
|
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
|
Add script to generate text file from the debates data (xml)
Upon request by Inger Leemans. Adjusted the debates2csv script to save
all text in the xml file(s) to txt file(s).
|
#!/usr/bin/env python
# -.*- coding: utf-8 -.*-
"""Script to save text in xml file(s) to text file(s).
Usage: debates2txt.py <xml-file or directory containing xml files>
2014-12-15 j.vanderzwaan@esciencecenter.nl
"""
import argparse
import xml.etree.ElementTree as ET
import re
import os
import codecs
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('xml', help='the name of the xml file containing the '
'the word field counts should be extracted for')
args = parser.parse_args()
# file or directory?
if os.path.isfile(args.xml):
files = [args.xml]
else:
files = []
for fn in os.listdir(args.xml):
file_name = '{}{}{}'.format(args.xml, os.sep, fn)
if os.path.isfile(file_name):
files.append(file_name)
for input_file in files:
# read xml file
tree = ET.parse(input_file)
root = tree.getroot()
lines = []
for speech in tree.getiterator('speech'):
speaker = speech.attrib.get('speaker')
text = ET.tostring(speech)
# remove xml tags
text = re.sub('<[^>]*>', '', text)
# remove html entities (e.g., ɣ)
text = re.sub('&#\d+;', '', text)
lines.append(text)
_head, tail = os.path.split(input_file)
print tail
out_file = '{}.txt'.format(tail.split('.')[0])
with codecs.open(out_file, 'wb', 'utf-8') as f:
f.write('\n'.join(lines))
|
<commit_before><commit_msg>Add script to generate text file from the debates data (xml)
Upon request by Inger Leemans. Adjusted the debates2csv script to save
all text in the xml file(s) to txt file(s).<commit_after>
|
#!/usr/bin/env python
# -.*- coding: utf-8 -.*-
"""Script to save text in xml file(s) to text file(s).
Usage: debates2txt.py <xml-file or directory containing xml files>
2014-12-15 j.vanderzwaan@esciencecenter.nl
"""
import argparse
import xml.etree.ElementTree as ET
import re
import os
import codecs
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('xml', help='the name of the xml file containing the '
'the word field counts should be extracted for')
args = parser.parse_args()
# file or directory?
if os.path.isfile(args.xml):
files = [args.xml]
else:
files = []
for fn in os.listdir(args.xml):
file_name = '{}{}{}'.format(args.xml, os.sep, fn)
if os.path.isfile(file_name):
files.append(file_name)
for input_file in files:
# read xml file
tree = ET.parse(input_file)
root = tree.getroot()
lines = []
for speech in tree.getiterator('speech'):
speaker = speech.attrib.get('speaker')
text = ET.tostring(speech)
# remove xml tags
text = re.sub('<[^>]*>', '', text)
# remove html entities (e.g., ɣ)
text = re.sub('&#\d+;', '', text)
lines.append(text)
_head, tail = os.path.split(input_file)
print tail
out_file = '{}.txt'.format(tail.split('.')[0])
with codecs.open(out_file, 'wb', 'utf-8') as f:
f.write('\n'.join(lines))
|
Add script to generate text file from the debates data (xml)
Upon request by Inger Leemans. Adjusted the debates2csv script to save
all text in the xml file(s) to txt file(s).#!/usr/bin/env python
# -.*- coding: utf-8 -.*-
"""Script to save text in xml file(s) to text file(s).
Usage: debates2txt.py <xml-file or directory containing xml files>
2014-12-15 j.vanderzwaan@esciencecenter.nl
"""
import argparse
import xml.etree.ElementTree as ET
import re
import os
import codecs
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('xml', help='the name of the xml file containing the '
'the word field counts should be extracted for')
args = parser.parse_args()
# file or directory?
if os.path.isfile(args.xml):
files = [args.xml]
else:
files = []
for fn in os.listdir(args.xml):
file_name = '{}{}{}'.format(args.xml, os.sep, fn)
if os.path.isfile(file_name):
files.append(file_name)
for input_file in files:
# read xml file
tree = ET.parse(input_file)
root = tree.getroot()
lines = []
for speech in tree.getiterator('speech'):
speaker = speech.attrib.get('speaker')
text = ET.tostring(speech)
# remove xml tags
text = re.sub('<[^>]*>', '', text)
# remove html entities (e.g., ɣ)
text = re.sub('&#\d+;', '', text)
lines.append(text)
_head, tail = os.path.split(input_file)
print tail
out_file = '{}.txt'.format(tail.split('.')[0])
with codecs.open(out_file, 'wb', 'utf-8') as f:
f.write('\n'.join(lines))
|
<commit_before><commit_msg>Add script to generate text file from the debates data (xml)
Upon request by Inger Leemans. Adjusted the debates2csv script to save
all text in the xml file(s) to txt file(s).<commit_after>#!/usr/bin/env python
# -.*- coding: utf-8 -.*-
"""Script to save text in xml file(s) to text file(s).
Usage: debates2txt.py <xml-file or directory containing xml files>
2014-12-15 j.vanderzwaan@esciencecenter.nl
"""
import argparse
import xml.etree.ElementTree as ET
import re
import os
import codecs
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('xml', help='the name of the xml file containing the '
'the word field counts should be extracted for')
args = parser.parse_args()
# file or directory?
if os.path.isfile(args.xml):
files = [args.xml]
else:
files = []
for fn in os.listdir(args.xml):
file_name = '{}{}{}'.format(args.xml, os.sep, fn)
if os.path.isfile(file_name):
files.append(file_name)
for input_file in files:
# read xml file
tree = ET.parse(input_file)
root = tree.getroot()
lines = []
for speech in tree.getiterator('speech'):
speaker = speech.attrib.get('speaker')
text = ET.tostring(speech)
# remove xml tags
text = re.sub('<[^>]*>', '', text)
# remove html entities (e.g., ɣ)
text = re.sub('&#\d+;', '', text)
lines.append(text)
_head, tail = os.path.split(input_file)
print tail
out_file = '{}.txt'.format(tail.split('.')[0])
with codecs.open(out_file, 'wb', 'utf-8') as f:
f.write('\n'.join(lines))
|
|
cc639eea9e4c4934c6aac9cd04b4a09da60f1403
|
custom/enikshay/management/commands/check_voucher_cases.py
|
custom/enikshay/management/commands/check_voucher_cases.py
|
import csv
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import get_person_case_from_voucher, CASE_TYPE_VOUCHER
from custom.enikshay.const import PERSON_FIRST_NAME, PERSON_LAST_NAME
class Command(BaseCommand):
help = """
Check all enikshay voucher cases to see which ones have been set to "paid"
"""
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
accessor = CaseAccessors(domain)
voucher_ids = accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER)
rows = [['voucher_id', 'state', 'comments', 'person_id', 'person_name']]
for voucher in with_progress_bar(accessor.iter_cases(voucher_ids), len(voucher_ids)):
if voucher.get_case_property('state') in ('paid', 'rejected'):
person = get_person_case_from_voucher(domain, voucher.case_id)
rows.append([
voucher.case_id,
voucher.get_case_property('state'),
voucher.get_case_property('comments'),
person.case_id,
"{} {}".format(person.get_case_property(PERSON_FIRST_NAME),
person.get_case_property(PERSON_LAST_NAME)),
])
filename = 'voucher_statuses.csv'
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
print ('{} cases have a status of paid or rejected. Details written to {}'
.format(len(rows) - 1, filename))
|
Add cmd to check on the status of voucher cases
|
Add cmd to check on the status of voucher cases
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add cmd to check on the status of voucher cases
|
import csv
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import get_person_case_from_voucher, CASE_TYPE_VOUCHER
from custom.enikshay.const import PERSON_FIRST_NAME, PERSON_LAST_NAME
class Command(BaseCommand):
help = """
Check all enikshay voucher cases to see which ones have been set to "paid"
"""
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
accessor = CaseAccessors(domain)
voucher_ids = accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER)
rows = [['voucher_id', 'state', 'comments', 'person_id', 'person_name']]
for voucher in with_progress_bar(accessor.iter_cases(voucher_ids), len(voucher_ids)):
if voucher.get_case_property('state') in ('paid', 'rejected'):
person = get_person_case_from_voucher(domain, voucher.case_id)
rows.append([
voucher.case_id,
voucher.get_case_property('state'),
voucher.get_case_property('comments'),
person.case_id,
"{} {}".format(person.get_case_property(PERSON_FIRST_NAME),
person.get_case_property(PERSON_LAST_NAME)),
])
filename = 'voucher_statuses.csv'
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
print ('{} cases have a status of paid or rejected. Details written to {}'
.format(len(rows) - 1, filename))
|
<commit_before><commit_msg>Add cmd to check on the status of voucher cases<commit_after>
|
import csv
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import get_person_case_from_voucher, CASE_TYPE_VOUCHER
from custom.enikshay.const import PERSON_FIRST_NAME, PERSON_LAST_NAME
class Command(BaseCommand):
help = """
Check all enikshay voucher cases to see which ones have been set to "paid"
"""
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
accessor = CaseAccessors(domain)
voucher_ids = accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER)
rows = [['voucher_id', 'state', 'comments', 'person_id', 'person_name']]
for voucher in with_progress_bar(accessor.iter_cases(voucher_ids), len(voucher_ids)):
if voucher.get_case_property('state') in ('paid', 'rejected'):
person = get_person_case_from_voucher(domain, voucher.case_id)
rows.append([
voucher.case_id,
voucher.get_case_property('state'),
voucher.get_case_property('comments'),
person.case_id,
"{} {}".format(person.get_case_property(PERSON_FIRST_NAME),
person.get_case_property(PERSON_LAST_NAME)),
])
filename = 'voucher_statuses.csv'
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
print ('{} cases have a status of paid or rejected. Details written to {}'
.format(len(rows) - 1, filename))
|
Add cmd to check on the status of voucher casesimport csv
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import get_person_case_from_voucher, CASE_TYPE_VOUCHER
from custom.enikshay.const import PERSON_FIRST_NAME, PERSON_LAST_NAME
class Command(BaseCommand):
help = """
Check all enikshay voucher cases to see which ones have been set to "paid"
"""
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
accessor = CaseAccessors(domain)
voucher_ids = accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER)
rows = [['voucher_id', 'state', 'comments', 'person_id', 'person_name']]
for voucher in with_progress_bar(accessor.iter_cases(voucher_ids), len(voucher_ids)):
if voucher.get_case_property('state') in ('paid', 'rejected'):
person = get_person_case_from_voucher(domain, voucher.case_id)
rows.append([
voucher.case_id,
voucher.get_case_property('state'),
voucher.get_case_property('comments'),
person.case_id,
"{} {}".format(person.get_case_property(PERSON_FIRST_NAME),
person.get_case_property(PERSON_LAST_NAME)),
])
filename = 'voucher_statuses.csv'
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
print ('{} cases have a status of paid or rejected. Details written to {}'
.format(len(rows) - 1, filename))
|
<commit_before><commit_msg>Add cmd to check on the status of voucher cases<commit_after>import csv
from django.core.management.base import BaseCommand
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import get_person_case_from_voucher, CASE_TYPE_VOUCHER
from custom.enikshay.const import PERSON_FIRST_NAME, PERSON_LAST_NAME
class Command(BaseCommand):
help = """
Check all enikshay voucher cases to see which ones have been set to "paid"
"""
def add_arguments(self, parser):
parser.add_argument('domain')
def handle(self, domain, **options):
accessor = CaseAccessors(domain)
voucher_ids = accessor.get_case_ids_in_domain(CASE_TYPE_VOUCHER)
rows = [['voucher_id', 'state', 'comments', 'person_id', 'person_name']]
for voucher in with_progress_bar(accessor.iter_cases(voucher_ids), len(voucher_ids)):
if voucher.get_case_property('state') in ('paid', 'rejected'):
person = get_person_case_from_voucher(domain, voucher.case_id)
rows.append([
voucher.case_id,
voucher.get_case_property('state'),
voucher.get_case_property('comments'),
person.case_id,
"{} {}".format(person.get_case_property(PERSON_FIRST_NAME),
person.get_case_property(PERSON_LAST_NAME)),
])
filename = 'voucher_statuses.csv'
with open(filename, 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
print ('{} cases have a status of paid or rejected. Details written to {}'
.format(len(rows) - 1, filename))
|
|
18f43e38df3e21e3625351f531fc1d7d5e017621
|
demo_propagation.py
|
demo_propagation.py
|
# Copyright (c) 2017, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple demo showcasing how to use the fire propagation module and how to display the result"""
import matplotlib.pyplot as plt
from fire_rs.firemodel import propagation
from fire_rs.geodata.display import GeoDataDisplay, plot_ignition_point
from fire_rs.geodata.geo_data import TimedPoint
if __name__ == "__main__":
area = ((480060.0, 485060.0), (6210074.0, 6215074.0))
env = propagation.Environment(area, wind_speed=5., wind_dir=0.)
ignition_point = TimedPoint(area[0][0] + 1000.0, area[1][0] + 2000.0, 0)
fire = propagation.propagate_from_points(env, ignition_point, 3000)
gdd = GeoDataDisplay.pyplot_figure(env.raster.combine(fire.ignitions().slice(["ignition"])))
gdd.draw_elevation_shade(with_colorbar=False)
gdd.draw_wind_quiver()
gdd.draw_ignition_contour(with_labels=True)
plot_ignition_point(gdd.axis, ignition_point)
plt.plot(block=True)
|
Add python script that plots the results of a fire propagation
|
Add python script that plots the results of a fire propagation
|
Python
|
bsd-2-clause
|
fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop
|
Add python script that plots the results of a fire propagation
|
# Copyright (c) 2017, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple demo showcasing how to use the fire propagation module and how to display the result"""
import matplotlib.pyplot as plt
from fire_rs.firemodel import propagation
from fire_rs.geodata.display import GeoDataDisplay, plot_ignition_point
from fire_rs.geodata.geo_data import TimedPoint
if __name__ == "__main__":
area = ((480060.0, 485060.0), (6210074.0, 6215074.0))
env = propagation.Environment(area, wind_speed=5., wind_dir=0.)
ignition_point = TimedPoint(area[0][0] + 1000.0, area[1][0] + 2000.0, 0)
fire = propagation.propagate_from_points(env, ignition_point, 3000)
gdd = GeoDataDisplay.pyplot_figure(env.raster.combine(fire.ignitions().slice(["ignition"])))
gdd.draw_elevation_shade(with_colorbar=False)
gdd.draw_wind_quiver()
gdd.draw_ignition_contour(with_labels=True)
plot_ignition_point(gdd.axis, ignition_point)
plt.plot(block=True)
|
<commit_before><commit_msg>Add python script that plots the results of a fire propagation<commit_after>
|
# Copyright (c) 2017, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple demo showcasing how to use the fire propagation module and how to display the result"""
import matplotlib.pyplot as plt
from fire_rs.firemodel import propagation
from fire_rs.geodata.display import GeoDataDisplay, plot_ignition_point
from fire_rs.geodata.geo_data import TimedPoint
if __name__ == "__main__":
area = ((480060.0, 485060.0), (6210074.0, 6215074.0))
env = propagation.Environment(area, wind_speed=5., wind_dir=0.)
ignition_point = TimedPoint(area[0][0] + 1000.0, area[1][0] + 2000.0, 0)
fire = propagation.propagate_from_points(env, ignition_point, 3000)
gdd = GeoDataDisplay.pyplot_figure(env.raster.combine(fire.ignitions().slice(["ignition"])))
gdd.draw_elevation_shade(with_colorbar=False)
gdd.draw_wind_quiver()
gdd.draw_ignition_contour(with_labels=True)
plot_ignition_point(gdd.axis, ignition_point)
plt.plot(block=True)
|
Add python script that plots the results of a fire propagation# Copyright (c) 2017, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple demo showcasing how to use the fire propagation module and how to display the result"""
import matplotlib.pyplot as plt
from fire_rs.firemodel import propagation
from fire_rs.geodata.display import GeoDataDisplay, plot_ignition_point
from fire_rs.geodata.geo_data import TimedPoint
if __name__ == "__main__":
area = ((480060.0, 485060.0), (6210074.0, 6215074.0))
env = propagation.Environment(area, wind_speed=5., wind_dir=0.)
ignition_point = TimedPoint(area[0][0] + 1000.0, area[1][0] + 2000.0, 0)
fire = propagation.propagate_from_points(env, ignition_point, 3000)
gdd = GeoDataDisplay.pyplot_figure(env.raster.combine(fire.ignitions().slice(["ignition"])))
gdd.draw_elevation_shade(with_colorbar=False)
gdd.draw_wind_quiver()
gdd.draw_ignition_contour(with_labels=True)
plot_ignition_point(gdd.axis, ignition_point)
plt.plot(block=True)
|
<commit_before><commit_msg>Add python script that plots the results of a fire propagation<commit_after># Copyright (c) 2017, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple demo showcasing how to use the fire propagation module and how to display the result"""
import matplotlib.pyplot as plt
from fire_rs.firemodel import propagation
from fire_rs.geodata.display import GeoDataDisplay, plot_ignition_point
from fire_rs.geodata.geo_data import TimedPoint
if __name__ == "__main__":
area = ((480060.0, 485060.0), (6210074.0, 6215074.0))
env = propagation.Environment(area, wind_speed=5., wind_dir=0.)
ignition_point = TimedPoint(area[0][0] + 1000.0, area[1][0] + 2000.0, 0)
fire = propagation.propagate_from_points(env, ignition_point, 3000)
gdd = GeoDataDisplay.pyplot_figure(env.raster.combine(fire.ignitions().slice(["ignition"])))
gdd.draw_elevation_shade(with_colorbar=False)
gdd.draw_wind_quiver()
gdd.draw_ignition_contour(with_labels=True)
plot_ignition_point(gdd.axis, ignition_point)
plt.plot(block=True)
|
|
38fcdaa72d666c43975d53f99792b339d5b9747d
|
dockci/migrations/0007.py
|
dockci/migrations/0007.py
|
"""
Fix artifacts after rename from build models to job models
"""
import py.error
import py.path
import yaml
from yaml import safe_load as yaml_load
jobs_path = py.path.local().join('data', 'jobs')
for project_path in jobs_path.listdir():
build_files = project_path.listdir(
lambda filename: filename.fnmatch('*.yaml')
)
for build_file in build_files:
build_slug = build_file.purebasename
build_config_file = project_path.join(
'%s_output' % build_slug, 'dockci.yaml',
)
try:
with build_config_file.open() as handle:
build_config_data = yaml_load(handle)
if 'build_output' in build_config_data:
build_config_data['job_output'] = \
build_config_data.pop('build_output')
with build_config_file.open('w') as handle:
yaml.dump(build_config_data, handle, default_flow_style=False)
except py.error.ENOENT:
pass
|
Migrate build -> job in dockci job config meta
|
Migrate build -> job in dockci job config meta
|
Python
|
isc
|
RickyCook/DockCI,sprucedev/DockCI-Agent,RickyCook/DockCI,sprucedev/DockCI,sprucedev/DockCI-Agent,sprucedev/DockCI,RickyCook/DockCI,sprucedev/DockCI,RickyCook/DockCI,sprucedev/DockCI
|
Migrate build -> job in dockci job config meta
|
"""
Fix artifacts after rename from build models to job models
"""
import py.error
import py.path
import yaml
from yaml import safe_load as yaml_load
jobs_path = py.path.local().join('data', 'jobs')
for project_path in jobs_path.listdir():
build_files = project_path.listdir(
lambda filename: filename.fnmatch('*.yaml')
)
for build_file in build_files:
build_slug = build_file.purebasename
build_config_file = project_path.join(
'%s_output' % build_slug, 'dockci.yaml',
)
try:
with build_config_file.open() as handle:
build_config_data = yaml_load(handle)
if 'build_output' in build_config_data:
build_config_data['job_output'] = \
build_config_data.pop('build_output')
with build_config_file.open('w') as handle:
yaml.dump(build_config_data, handle, default_flow_style=False)
except py.error.ENOENT:
pass
|
<commit_before><commit_msg>Migrate build -> job in dockci job config meta<commit_after>
|
"""
Fix artifacts after rename from build models to job models
"""
import py.error
import py.path
import yaml
from yaml import safe_load as yaml_load
jobs_path = py.path.local().join('data', 'jobs')
for project_path in jobs_path.listdir():
build_files = project_path.listdir(
lambda filename: filename.fnmatch('*.yaml')
)
for build_file in build_files:
build_slug = build_file.purebasename
build_config_file = project_path.join(
'%s_output' % build_slug, 'dockci.yaml',
)
try:
with build_config_file.open() as handle:
build_config_data = yaml_load(handle)
if 'build_output' in build_config_data:
build_config_data['job_output'] = \
build_config_data.pop('build_output')
with build_config_file.open('w') as handle:
yaml.dump(build_config_data, handle, default_flow_style=False)
except py.error.ENOENT:
pass
|
Migrate build -> job in dockci job config meta"""
Fix artifacts after rename from build models to job models
"""
import py.error
import py.path
import yaml
from yaml import safe_load as yaml_load
jobs_path = py.path.local().join('data', 'jobs')
for project_path in jobs_path.listdir():
build_files = project_path.listdir(
lambda filename: filename.fnmatch('*.yaml')
)
for build_file in build_files:
build_slug = build_file.purebasename
build_config_file = project_path.join(
'%s_output' % build_slug, 'dockci.yaml',
)
try:
with build_config_file.open() as handle:
build_config_data = yaml_load(handle)
if 'build_output' in build_config_data:
build_config_data['job_output'] = \
build_config_data.pop('build_output')
with build_config_file.open('w') as handle:
yaml.dump(build_config_data, handle, default_flow_style=False)
except py.error.ENOENT:
pass
|
<commit_before><commit_msg>Migrate build -> job in dockci job config meta<commit_after>"""
Fix artifacts after rename from build models to job models
"""
import py.error
import py.path
import yaml
from yaml import safe_load as yaml_load
jobs_path = py.path.local().join('data', 'jobs')
for project_path in jobs_path.listdir():
build_files = project_path.listdir(
lambda filename: filename.fnmatch('*.yaml')
)
for build_file in build_files:
build_slug = build_file.purebasename
build_config_file = project_path.join(
'%s_output' % build_slug, 'dockci.yaml',
)
try:
with build_config_file.open() as handle:
build_config_data = yaml_load(handle)
if 'build_output' in build_config_data:
build_config_data['job_output'] = \
build_config_data.pop('build_output')
with build_config_file.open('w') as handle:
yaml.dump(build_config_data, handle, default_flow_style=False)
except py.error.ENOENT:
pass
|
|
76d47df7fdb9af30437b2cad8f2800e907b60fd9
|
tests/test_injections.py
|
tests/test_injections.py
|
"""Dependency injector container unit tests."""
import unittest2 as unittest
from dependency_injector import injections
from dependency_injector import providers
class PositionalInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.PositionalInjection(value=1)
def test_isinstance(self):
injection = injections.PositionalInjection(1)
self.assertIsInstance(injection, injections.Injection)
def test_get_value_with_not_provider(self):
injection = injections.PositionalInjection(123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.PositionalInjection(providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
class NamedInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.NamedInjection(name='name', value=1)
def test_isinstance(self):
injection = injections.NamedInjection('name', 1)
self.assertIsInstance(injection, injections.Injection)
def test_get_name(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_name(), 'name')
def test_get_value_with_not_provider(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.NamedInjection('name',
providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
|
Add few tests for injections
|
Add few tests for injections
|
Python
|
bsd-3-clause
|
rmk135/dependency_injector,rmk135/objects,ets-labs/dependency_injector,ets-labs/python-dependency-injector
|
Add few tests for injections
|
"""Dependency injector container unit tests."""
import unittest2 as unittest
from dependency_injector import injections
from dependency_injector import providers
class PositionalInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.PositionalInjection(value=1)
def test_isinstance(self):
injection = injections.PositionalInjection(1)
self.assertIsInstance(injection, injections.Injection)
def test_get_value_with_not_provider(self):
injection = injections.PositionalInjection(123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.PositionalInjection(providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
class NamedInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.NamedInjection(name='name', value=1)
def test_isinstance(self):
injection = injections.NamedInjection('name', 1)
self.assertIsInstance(injection, injections.Injection)
def test_get_name(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_name(), 'name')
def test_get_value_with_not_provider(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.NamedInjection('name',
providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
|
<commit_before><commit_msg>Add few tests for injections<commit_after>
|
"""Dependency injector container unit tests."""
import unittest2 as unittest
from dependency_injector import injections
from dependency_injector import providers
class PositionalInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.PositionalInjection(value=1)
def test_isinstance(self):
injection = injections.PositionalInjection(1)
self.assertIsInstance(injection, injections.Injection)
def test_get_value_with_not_provider(self):
injection = injections.PositionalInjection(123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.PositionalInjection(providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
class NamedInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.NamedInjection(name='name', value=1)
def test_isinstance(self):
injection = injections.NamedInjection('name', 1)
self.assertIsInstance(injection, injections.Injection)
def test_get_name(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_name(), 'name')
def test_get_value_with_not_provider(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.NamedInjection('name',
providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
|
Add few tests for injections"""Dependency injector container unit tests."""
import unittest2 as unittest
from dependency_injector import injections
from dependency_injector import providers
class PositionalInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.PositionalInjection(value=1)
def test_isinstance(self):
injection = injections.PositionalInjection(1)
self.assertIsInstance(injection, injections.Injection)
def test_get_value_with_not_provider(self):
injection = injections.PositionalInjection(123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.PositionalInjection(providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
class NamedInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.NamedInjection(name='name', value=1)
def test_isinstance(self):
injection = injections.NamedInjection('name', 1)
self.assertIsInstance(injection, injections.Injection)
def test_get_name(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_name(), 'name')
def test_get_value_with_not_provider(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.NamedInjection('name',
providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
|
<commit_before><commit_msg>Add few tests for injections<commit_after>"""Dependency injector container unit tests."""
import unittest2 as unittest
from dependency_injector import injections
from dependency_injector import providers
class PositionalInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.PositionalInjection(value=1)
def test_isinstance(self):
injection = injections.PositionalInjection(1)
self.assertIsInstance(injection, injections.Injection)
def test_get_value_with_not_provider(self):
injection = injections.PositionalInjection(123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.PositionalInjection(providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
class NamedInjectionTests(unittest.TestCase):
def test_init_with_kwargs(self):
injections.NamedInjection(name='name', value=1)
def test_isinstance(self):
injection = injections.NamedInjection('name', 1)
self.assertIsInstance(injection, injections.Injection)
def test_get_name(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_name(), 'name')
def test_get_value_with_not_provider(self):
injection = injections.NamedInjection('name', 123)
self.assertEquals(injection.get_value(), 123)
def test_get_value_with_factory(self):
injection = injections.NamedInjection('name',
providers.Factory(object))
obj1 = injection.get_value()
obj2 = injection.get_value()
self.assertIs(type(obj1), object)
self.assertIs(type(obj2), object)
self.assertIsNot(obj1, obj2)
|
|
d659a53036bd39b66e92e89e7c40572d111dcd53
|
tests/test_invocation.py
|
tests/test_invocation.py
|
import sys
import subprocess
import re
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
Add test to capture than honcho can invoke under runpy.
|
Add test to capture than honcho can invoke under runpy.
|
Python
|
mit
|
nickstenning/honcho,nickstenning/honcho
|
Add test to capture than honcho can invoke under runpy.
|
import sys
import subprocess
import re
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
<commit_before><commit_msg>Add test to capture than honcho can invoke under runpy.<commit_after>
|
import sys
import subprocess
import re
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
Add test to capture than honcho can invoke under runpy.import sys
import subprocess
import re
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
<commit_before><commit_msg>Add test to capture than honcho can invoke under runpy.<commit_after>import sys
import subprocess
import re
def test_runpy_invoke():
"""
Ensure honcho can also be invoked using runpy (python -m)
"""
cmd = [sys.executable, '-m', 'honcho', 'version']
output = subprocess.check_output(cmd, universal_newlines=True)
assert re.match(r'honcho \d\.\d\.\d.*\n', output)
|
|
f65bfcef8da9537a69db8ea7e24fa11dbb630100
|
tests/test_validation.py
|
tests/test_validation.py
|
"""Test the validation module"""
from pytest import mark
from json import loads
from gobble.collection import Collection
from gobble.config import EXAMPLES_DIR
from gobble.validation import Validator
collection = Collection(EXAMPLES_DIR, flavour='fiscal')
packages = collection.packages
is_valid = map(lambda path: 'invalid' not in path, collection.filepaths)
# noinspection PyShadowingNames
@mark.parametrize('package, is_valid', zip(packages, is_valid))
def test_assert_validation_correct(package, is_valid):
assert Validator(package).is_valid is is_valid
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_has_timestamp(package):
assert 'timestamp' in Validator(package).report.keys()
assert 'is_valid' in Validator(package).report.keys()
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_is_saved_as_json(package):
Validator(package).save('/tmp/test.json')
assert isinstance(loads(open('/tmp/test.json').read()), dict)
|
Write the tests for the Validation class.
|
Write the tests for the Validation class.
|
Python
|
mit
|
openspending/gobble
|
Write the tests for the Validation class.
|
"""Test the validation module"""
from pytest import mark
from json import loads
from gobble.collection import Collection
from gobble.config import EXAMPLES_DIR
from gobble.validation import Validator
collection = Collection(EXAMPLES_DIR, flavour='fiscal')
packages = collection.packages
is_valid = map(lambda path: 'invalid' not in path, collection.filepaths)
# noinspection PyShadowingNames
@mark.parametrize('package, is_valid', zip(packages, is_valid))
def test_assert_validation_correct(package, is_valid):
assert Validator(package).is_valid is is_valid
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_has_timestamp(package):
assert 'timestamp' in Validator(package).report.keys()
assert 'is_valid' in Validator(package).report.keys()
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_is_saved_as_json(package):
Validator(package).save('/tmp/test.json')
assert isinstance(loads(open('/tmp/test.json').read()), dict)
|
<commit_before><commit_msg>Write the tests for the Validation class.<commit_after>
|
"""Test the validation module"""
from pytest import mark
from json import loads
from gobble.collection import Collection
from gobble.config import EXAMPLES_DIR
from gobble.validation import Validator
collection = Collection(EXAMPLES_DIR, flavour='fiscal')
packages = collection.packages
is_valid = map(lambda path: 'invalid' not in path, collection.filepaths)
# noinspection PyShadowingNames
@mark.parametrize('package, is_valid', zip(packages, is_valid))
def test_assert_validation_correct(package, is_valid):
assert Validator(package).is_valid is is_valid
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_has_timestamp(package):
assert 'timestamp' in Validator(package).report.keys()
assert 'is_valid' in Validator(package).report.keys()
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_is_saved_as_json(package):
Validator(package).save('/tmp/test.json')
assert isinstance(loads(open('/tmp/test.json').read()), dict)
|
Write the tests for the Validation class."""Test the validation module"""
from pytest import mark
from json import loads
from gobble.collection import Collection
from gobble.config import EXAMPLES_DIR
from gobble.validation import Validator
collection = Collection(EXAMPLES_DIR, flavour='fiscal')
packages = collection.packages
is_valid = map(lambda path: 'invalid' not in path, collection.filepaths)
# noinspection PyShadowingNames
@mark.parametrize('package, is_valid', zip(packages, is_valid))
def test_assert_validation_correct(package, is_valid):
assert Validator(package).is_valid is is_valid
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_has_timestamp(package):
assert 'timestamp' in Validator(package).report.keys()
assert 'is_valid' in Validator(package).report.keys()
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_is_saved_as_json(package):
Validator(package).save('/tmp/test.json')
assert isinstance(loads(open('/tmp/test.json').read()), dict)
|
<commit_before><commit_msg>Write the tests for the Validation class.<commit_after>"""Test the validation module"""
from pytest import mark
from json import loads
from gobble.collection import Collection
from gobble.config import EXAMPLES_DIR
from gobble.validation import Validator
collection = Collection(EXAMPLES_DIR, flavour='fiscal')
packages = collection.packages
is_valid = map(lambda path: 'invalid' not in path, collection.filepaths)
# noinspection PyShadowingNames
@mark.parametrize('package, is_valid', zip(packages, is_valid))
def test_assert_validation_correct(package, is_valid):
assert Validator(package).is_valid is is_valid
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_has_timestamp(package):
assert 'timestamp' in Validator(package).report.keys()
assert 'is_valid' in Validator(package).report.keys()
# noinspection PyShadowingNames
@mark.parametrize('package', packages)
def test_assert_validation_report_is_saved_as_json(package):
Validator(package).save('/tmp/test.json')
assert isinstance(loads(open('/tmp/test.json').read()), dict)
|
|
d509596cc6990c18fcf77488f22751f77b5e38ef
|
tests/tests/test_init.py
|
tests/tests/test_init.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
import channels
class ChannelsModuleTestCase(TestCase):
def test_load_module(self):
import channels.backends.slack
name = "channels.backends.slack"
module = channels._load_module(name)
self.assertEqual(module, channels.backends.slack)
def test_load_backend(self):
from channels.backends.slack import SlackChannel
name = "channels.backends.slack.SlackChannel"
klass = channels._load_backend(name)
self.assertEqual(klass, SlackChannel)
|
Add some test for channels module
|
Add some test for channels module
|
Python
|
mit
|
ymyzk/django-channels,ymyzk/kawasemi
|
Add some test for channels module
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
import channels
class ChannelsModuleTestCase(TestCase):
def test_load_module(self):
import channels.backends.slack
name = "channels.backends.slack"
module = channels._load_module(name)
self.assertEqual(module, channels.backends.slack)
def test_load_backend(self):
from channels.backends.slack import SlackChannel
name = "channels.backends.slack.SlackChannel"
klass = channels._load_backend(name)
self.assertEqual(klass, SlackChannel)
|
<commit_before><commit_msg>Add some test for channels module<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
import channels
class ChannelsModuleTestCase(TestCase):
def test_load_module(self):
import channels.backends.slack
name = "channels.backends.slack"
module = channels._load_module(name)
self.assertEqual(module, channels.backends.slack)
def test_load_backend(self):
from channels.backends.slack import SlackChannel
name = "channels.backends.slack.SlackChannel"
klass = channels._load_backend(name)
self.assertEqual(klass, SlackChannel)
|
Add some test for channels module# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
import channels
class ChannelsModuleTestCase(TestCase):
def test_load_module(self):
import channels.backends.slack
name = "channels.backends.slack"
module = channels._load_module(name)
self.assertEqual(module, channels.backends.slack)
def test_load_backend(self):
from channels.backends.slack import SlackChannel
name = "channels.backends.slack.SlackChannel"
klass = channels._load_backend(name)
self.assertEqual(klass, SlackChannel)
|
<commit_before><commit_msg>Add some test for channels module<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
import channels
class ChannelsModuleTestCase(TestCase):
def test_load_module(self):
import channels.backends.slack
name = "channels.backends.slack"
module = channels._load_module(name)
self.assertEqual(module, channels.backends.slack)
def test_load_backend(self):
from channels.backends.slack import SlackChannel
name = "channels.backends.slack.SlackChannel"
klass = channels._load_backend(name)
self.assertEqual(klass, SlackChannel)
|
|
b8c3faca98688507554d952e8eb0ee71ee205bd5
|
ps3.py
|
ps3.py
|
"""
Team Challenge: Predicting Turn Types
Authors: Tong, Will, and Ryan
"""
import os
import sys
import csv
import argparse
def read_csv(path):
output = []
with open(path, 'rb') as f:
reader = csv.reader(f, delimiter=",")
for row in reader:
output.append(row)
return output
def read_dir(path):
"""
Takes a path to a directory of csv data files, parses them individually,
and returns an array of the results
"""
output = []
for root, subdirs, files in os.walk(path):
for f in files:
if f.endswith(".csv"):
a_file_path = os.path.join(root, f)
csv = read_csv(a_file_path)
output.append(csv)
return output
def main(args):
data = read_dir(args.data)
print(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument( "-d", "--data", help="pass a folder path to the data")
args = parser.parse_args()
main(args)
|
Read in the directory of CSVs
|
Read in the directory of CSVs
|
Python
|
unlicense
|
dropofwill/nlp-predicting-turn-types
|
Read in the directory of CSVs
|
"""
Team Challenge: Predicting Turn Types
Authors: Tong, Will, and Ryan
"""
import os
import sys
import csv
import argparse
def read_csv(path):
output = []
with open(path, 'rb') as f:
reader = csv.reader(f, delimiter=",")
for row in reader:
output.append(row)
return output
def read_dir(path):
"""
Takes a path to a directory of csv data files, parses them individually,
and returns an array of the results
"""
output = []
for root, subdirs, files in os.walk(path):
for f in files:
if f.endswith(".csv"):
a_file_path = os.path.join(root, f)
csv = read_csv(a_file_path)
output.append(csv)
return output
def main(args):
data = read_dir(args.data)
print(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument( "-d", "--data", help="pass a folder path to the data")
args = parser.parse_args()
main(args)
|
<commit_before><commit_msg>Read in the directory of CSVs<commit_after>
|
"""
Team Challenge: Predicting Turn Types
Authors: Tong, Will, and Ryan
"""
import os
import sys
import csv
import argparse
def read_csv(path):
output = []
with open(path, 'rb') as f:
reader = csv.reader(f, delimiter=",")
for row in reader:
output.append(row)
return output
def read_dir(path):
"""
Takes a path to a directory of csv data files, parses them individually,
and returns an array of the results
"""
output = []
for root, subdirs, files in os.walk(path):
for f in files:
if f.endswith(".csv"):
a_file_path = os.path.join(root, f)
csv = read_csv(a_file_path)
output.append(csv)
return output
def main(args):
data = read_dir(args.data)
print(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument( "-d", "--data", help="pass a folder path to the data")
args = parser.parse_args()
main(args)
|
Read in the directory of CSVs"""
Team Challenge: Predicting Turn Types
Authors: Tong, Will, and Ryan
"""
import os
import sys
import csv
import argparse
def read_csv(path):
output = []
with open(path, 'rb') as f:
reader = csv.reader(f, delimiter=",")
for row in reader:
output.append(row)
return output
def read_dir(path):
"""
Takes a path to a directory of csv data files, parses them individually,
and returns an array of the results
"""
output = []
for root, subdirs, files in os.walk(path):
for f in files:
if f.endswith(".csv"):
a_file_path = os.path.join(root, f)
csv = read_csv(a_file_path)
output.append(csv)
return output
def main(args):
data = read_dir(args.data)
print(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument( "-d", "--data", help="pass a folder path to the data")
args = parser.parse_args()
main(args)
|
<commit_before><commit_msg>Read in the directory of CSVs<commit_after>"""
Team Challenge: Predicting Turn Types
Authors: Tong, Will, and Ryan
"""
import os
import sys
import csv
import argparse
def read_csv(path):
output = []
with open(path, 'rb') as f:
reader = csv.reader(f, delimiter=",")
for row in reader:
output.append(row)
return output
def read_dir(path):
"""
Takes a path to a directory of csv data files, parses them individually,
and returns an array of the results
"""
output = []
for root, subdirs, files in os.walk(path):
for f in files:
if f.endswith(".csv"):
a_file_path = os.path.join(root, f)
csv = read_csv(a_file_path)
output.append(csv)
return output
def main(args):
data = read_dir(args.data)
print(data)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument( "-d", "--data", help="pass a folder path to the data")
args = parser.parse_args()
main(args)
|
|
3bf03b8fee78d6950b4e3cec875392a70b2d577f
|
grokapi/cli.py
|
grokapi/cli.py
|
# -*- coding: utf-8 -*-
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
Add commande-line interface for grokapi
|
Add commande-line interface for grokapi
Using an ArgumentParser
Closes #4
|
Python
|
mit
|
Commonists/Grokapi
|
Add commande-line interface for grokapi
Using an ArgumentParser
Closes #4
|
# -*- coding: utf-8 -*-
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add commande-line interface for grokapi
Using an ArgumentParser
Closes #4<commit_after>
|
# -*- coding: utf-8 -*-
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
Add commande-line interface for grokapi
Using an ArgumentParser
Closes #4# -*- coding: utf-8 -*-
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add commande-line interface for grokapi
Using an ArgumentParser
Closes #4<commit_after># -*- coding: utf-8 -*-
from queries import Grok
def print_monthly_views(site, pages, year, month):
grok = Grok(site)
for page in pages:
result = grok.get_views_for_month(page, year, month)
print result['daily_views']
def main():
""" main script. """
from argparse import ArgumentParser
description = 'Extract traffic statistics of Wikipedia articles.'
parser = ArgumentParser(description=description)
parser.add_argument("-l", "--lang",
type=str,
dest="lang",
default="en",
required=True,
help="Language code for Wikipedia")
parser.add_argument("-y", "--year",
type=int,
dest="year",
default="en",
required=True,
help="Year")
parser.add_argument("-m", "--month",
type=int,
dest="month",
default="en",
required=True,
help="Month")
parser.add_argument("page", nargs='*',
metavar="PAGE",
help='A list of pages')
args = parser.parse_args()
print_monthly_views(args.lang, args.page, args.year, args.month)
if __name__ == '__main__':
main()
|
|
8ed003a193f721dc0f076171ee11e9ecbb4f6fed
|
run_story.py
|
run_story.py
|
#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
Implement a simple launcher for ZVM, to help test the machine.
|
Implement a simple launcher for ZVM, to help test the machine.
|
Python
|
bsd-3-clause
|
sussman/zvm,sussman/zvm
|
Implement a simple launcher for ZVM, to help test the machine.
|
#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Implement a simple launcher for ZVM, to help test the machine.<commit_after>
|
#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
Implement a simple launcher for ZVM, to help test the machine.#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Implement a simple launcher for ZVM, to help test the machine.<commit_after>#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
|
083f28cf3af37864a6638ac4a0c1361f65094de2
|
list_objects.py
|
list_objects.py
|
#!/usr/bin/env python2
import git
import sys
def main(args):
if len(args) != 1:
sys.stderr.write("Need path to the repo\n")
sys.exit(1)
repo = git.Repo(args[0], odbt=git.GitCmdObjectDB)
vmap = set()
i = 0
for c in repo.iter_commits('--all'):
traverse(c.tree, vmap)
i += 1
if i % 5000 == 0:
sys.stderr.write("Scanned %d revisions\n" % i)
def traverse(t, vmap):
if t.hexsha in vmap:
return
vmap.add(t.hexsha)
# print out unseen blobs
for b in t.blobs:
if b.hexsha not in vmap and b.size > 102400:
vmap.add(b.hexsha)
print('%s %8d %s' % (b.hexsha, b.size/1024, b.path))
# and sub-trees
for st in t.trees:
traverse(st, vmap)
if __name__ == '__main__':
main(sys.argv[1:])
|
Add a tool to list all large objects in the repo
|
Add a tool to list all large objects in the repo
|
Python
|
mit
|
usovalx/bzr-export
|
Add a tool to list all large objects in the repo
|
#!/usr/bin/env python2
import git
import sys
def main(args):
if len(args) != 1:
sys.stderr.write("Need path to the repo\n")
sys.exit(1)
repo = git.Repo(args[0], odbt=git.GitCmdObjectDB)
vmap = set()
i = 0
for c in repo.iter_commits('--all'):
traverse(c.tree, vmap)
i += 1
if i % 5000 == 0:
sys.stderr.write("Scanned %d revisions\n" % i)
def traverse(t, vmap):
if t.hexsha in vmap:
return
vmap.add(t.hexsha)
# print out unseen blobs
for b in t.blobs:
if b.hexsha not in vmap and b.size > 102400:
vmap.add(b.hexsha)
print('%s %8d %s' % (b.hexsha, b.size/1024, b.path))
# and sub-trees
for st in t.trees:
traverse(st, vmap)
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before><commit_msg>Add a tool to list all large objects in the repo<commit_after>
|
#!/usr/bin/env python2
import git
import sys
def main(args):
if len(args) != 1:
sys.stderr.write("Need path to the repo\n")
sys.exit(1)
repo = git.Repo(args[0], odbt=git.GitCmdObjectDB)
vmap = set()
i = 0
for c in repo.iter_commits('--all'):
traverse(c.tree, vmap)
i += 1
if i % 5000 == 0:
sys.stderr.write("Scanned %d revisions\n" % i)
def traverse(t, vmap):
if t.hexsha in vmap:
return
vmap.add(t.hexsha)
# print out unseen blobs
for b in t.blobs:
if b.hexsha not in vmap and b.size > 102400:
vmap.add(b.hexsha)
print('%s %8d %s' % (b.hexsha, b.size/1024, b.path))
# and sub-trees
for st in t.trees:
traverse(st, vmap)
if __name__ == '__main__':
main(sys.argv[1:])
|
Add a tool to list all large objects in the repo#!/usr/bin/env python2
import git
import sys
def main(args):
if len(args) != 1:
sys.stderr.write("Need path to the repo\n")
sys.exit(1)
repo = git.Repo(args[0], odbt=git.GitCmdObjectDB)
vmap = set()
i = 0
for c in repo.iter_commits('--all'):
traverse(c.tree, vmap)
i += 1
if i % 5000 == 0:
sys.stderr.write("Scanned %d revisions\n" % i)
def traverse(t, vmap):
if t.hexsha in vmap:
return
vmap.add(t.hexsha)
# print out unseen blobs
for b in t.blobs:
if b.hexsha not in vmap and b.size > 102400:
vmap.add(b.hexsha)
print('%s %8d %s' % (b.hexsha, b.size/1024, b.path))
# and sub-trees
for st in t.trees:
traverse(st, vmap)
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before><commit_msg>Add a tool to list all large objects in the repo<commit_after>#!/usr/bin/env python2
import git
import sys
def main(args):
if len(args) != 1:
sys.stderr.write("Need path to the repo\n")
sys.exit(1)
repo = git.Repo(args[0], odbt=git.GitCmdObjectDB)
vmap = set()
i = 0
for c in repo.iter_commits('--all'):
traverse(c.tree, vmap)
i += 1
if i % 5000 == 0:
sys.stderr.write("Scanned %d revisions\n" % i)
def traverse(t, vmap):
if t.hexsha in vmap:
return
vmap.add(t.hexsha)
# print out unseen blobs
for b in t.blobs:
if b.hexsha not in vmap and b.size > 102400:
vmap.add(b.hexsha)
print('%s %8d %s' % (b.hexsha, b.size/1024, b.path))
# and sub-trees
for st in t.trees:
traverse(st, vmap)
if __name__ == '__main__':
main(sys.argv[1:])
|
|
138a84d07e050a23989eedfe414fcbb29a7266ab
|
mygpo/users/migrations/0015_case_insensitive_username.py
|
mygpo/users/migrations/0015_case_insensitive_username.py
|
from django.db import migrations
class Migration(migrations.Migration):
""" Create a unique case-insensitive index on the username column """
dependencies = [
('auth', '0001_initial'),
('users', '0014_django_uuidfield'),
]
operations = [
migrations.RunSQL(
'CREATE UNIQUE INDEX user_case_insensitive_unique '
'ON auth_user ((lower(username)));',
'DROP INDEX user_case_insensitive_unique',
),
]
|
Add case-insensitive index for username
|
Add case-insensitive index for username
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
Add case-insensitive index for username
|
from django.db import migrations
class Migration(migrations.Migration):
""" Create a unique case-insensitive index on the username column """
dependencies = [
('auth', '0001_initial'),
('users', '0014_django_uuidfield'),
]
operations = [
migrations.RunSQL(
'CREATE UNIQUE INDEX user_case_insensitive_unique '
'ON auth_user ((lower(username)));',
'DROP INDEX user_case_insensitive_unique',
),
]
|
<commit_before><commit_msg>Add case-insensitive index for username<commit_after>
|
from django.db import migrations
class Migration(migrations.Migration):
""" Create a unique case-insensitive index on the username column """
dependencies = [
('auth', '0001_initial'),
('users', '0014_django_uuidfield'),
]
operations = [
migrations.RunSQL(
'CREATE UNIQUE INDEX user_case_insensitive_unique '
'ON auth_user ((lower(username)));',
'DROP INDEX user_case_insensitive_unique',
),
]
|
Add case-insensitive index for usernamefrom django.db import migrations
class Migration(migrations.Migration):
""" Create a unique case-insensitive index on the username column """
dependencies = [
('auth', '0001_initial'),
('users', '0014_django_uuidfield'),
]
operations = [
migrations.RunSQL(
'CREATE UNIQUE INDEX user_case_insensitive_unique '
'ON auth_user ((lower(username)));',
'DROP INDEX user_case_insensitive_unique',
),
]
|
<commit_before><commit_msg>Add case-insensitive index for username<commit_after>from django.db import migrations
class Migration(migrations.Migration):
""" Create a unique case-insensitive index on the username column """
dependencies = [
('auth', '0001_initial'),
('users', '0014_django_uuidfield'),
]
operations = [
migrations.RunSQL(
'CREATE UNIQUE INDEX user_case_insensitive_unique '
'ON auth_user ((lower(username)));',
'DROP INDEX user_case_insensitive_unique',
),
]
|
|
fd1e1cf80d8ca0bbbf9f17951c54615c39bde80d
|
test/test_extrusion_entities.py
|
test/test_extrusion_entities.py
|
# -*- coding: utf-8 -*-
"""Create several entities by extrusion, check that the expected
sub-entities are returned and the resulting mesh is correct.
"""
import pygmsh
import numpy as np
def test():
kernels = [pygmsh.built_in, pygmsh.opencascade]
for kernel in kernels:
geom = kernel.Geometry()
p = geom.add_point([0, 0, 0], 1)
p_top, _, _ = geom.extrude(p, translation_axis=[1, 0, 0])
# The mesh should now contain exactly two points,
# the second one should be where the translation pointed.
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 2
assert np.array_equal(points[-1], [1, 0, 0])
# Check that the top entity (a PointBase) can be extruded correctly
# again.
_, _, _ = geom.extrude(p_top, translation_axis=[1, 0, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 3
assert np.array_equal(points[-1], [2, 0, 0])
# Set up new geometry with one line.
geom = kernel.Geometry()
p1 = geom.add_point([0, 0, 0], 1)
p2 = geom.add_point([1, 0, 0], 1)
line = geom.add_line(p1, p2)
l_top, _, _ = geom.extrude(line, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 5
assert np.array_equal(points[-2], [1, 1, 0])
# Check again for top entity (a LineBase).
_, _, _ = geom.extrude(l_top, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 8
assert np.array_equal(points[-3], [1, 2, 0])
|
Add extrusion test for points and lines
|
Add extrusion test for points and lines
|
Python
|
bsd-3-clause
|
nschloe/python4gmsh
|
Add extrusion test for points and lines
|
# -*- coding: utf-8 -*-
"""Create several entities by extrusion, check that the expected
sub-entities are returned and the resulting mesh is correct.
"""
import pygmsh
import numpy as np
def test():
kernels = [pygmsh.built_in, pygmsh.opencascade]
for kernel in kernels:
geom = kernel.Geometry()
p = geom.add_point([0, 0, 0], 1)
p_top, _, _ = geom.extrude(p, translation_axis=[1, 0, 0])
# The mesh should now contain exactly two points,
# the second one should be where the translation pointed.
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 2
assert np.array_equal(points[-1], [1, 0, 0])
# Check that the top entity (a PointBase) can be extruded correctly
# again.
_, _, _ = geom.extrude(p_top, translation_axis=[1, 0, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 3
assert np.array_equal(points[-1], [2, 0, 0])
# Set up new geometry with one line.
geom = kernel.Geometry()
p1 = geom.add_point([0, 0, 0], 1)
p2 = geom.add_point([1, 0, 0], 1)
line = geom.add_line(p1, p2)
l_top, _, _ = geom.extrude(line, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 5
assert np.array_equal(points[-2], [1, 1, 0])
# Check again for top entity (a LineBase).
_, _, _ = geom.extrude(l_top, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 8
assert np.array_equal(points[-3], [1, 2, 0])
|
<commit_before><commit_msg>Add extrusion test for points and lines<commit_after>
|
# -*- coding: utf-8 -*-
"""Create several entities by extrusion, check that the expected
sub-entities are returned and the resulting mesh is correct.
"""
import pygmsh
import numpy as np
def test():
kernels = [pygmsh.built_in, pygmsh.opencascade]
for kernel in kernels:
geom = kernel.Geometry()
p = geom.add_point([0, 0, 0], 1)
p_top, _, _ = geom.extrude(p, translation_axis=[1, 0, 0])
# The mesh should now contain exactly two points,
# the second one should be where the translation pointed.
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 2
assert np.array_equal(points[-1], [1, 0, 0])
# Check that the top entity (a PointBase) can be extruded correctly
# again.
_, _, _ = geom.extrude(p_top, translation_axis=[1, 0, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 3
assert np.array_equal(points[-1], [2, 0, 0])
# Set up new geometry with one line.
geom = kernel.Geometry()
p1 = geom.add_point([0, 0, 0], 1)
p2 = geom.add_point([1, 0, 0], 1)
line = geom.add_line(p1, p2)
l_top, _, _ = geom.extrude(line, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 5
assert np.array_equal(points[-2], [1, 1, 0])
# Check again for top entity (a LineBase).
_, _, _ = geom.extrude(l_top, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 8
assert np.array_equal(points[-3], [1, 2, 0])
|
Add extrusion test for points and lines# -*- coding: utf-8 -*-
"""Create several entities by extrusion, check that the expected
sub-entities are returned and the resulting mesh is correct.
"""
import pygmsh
import numpy as np
def test():
kernels = [pygmsh.built_in, pygmsh.opencascade]
for kernel in kernels:
geom = kernel.Geometry()
p = geom.add_point([0, 0, 0], 1)
p_top, _, _ = geom.extrude(p, translation_axis=[1, 0, 0])
# The mesh should now contain exactly two points,
# the second one should be where the translation pointed.
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 2
assert np.array_equal(points[-1], [1, 0, 0])
# Check that the top entity (a PointBase) can be extruded correctly
# again.
_, _, _ = geom.extrude(p_top, translation_axis=[1, 0, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 3
assert np.array_equal(points[-1], [2, 0, 0])
# Set up new geometry with one line.
geom = kernel.Geometry()
p1 = geom.add_point([0, 0, 0], 1)
p2 = geom.add_point([1, 0, 0], 1)
line = geom.add_line(p1, p2)
l_top, _, _ = geom.extrude(line, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 5
assert np.array_equal(points[-2], [1, 1, 0])
# Check again for top entity (a LineBase).
_, _, _ = geom.extrude(l_top, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 8
assert np.array_equal(points[-3], [1, 2, 0])
|
<commit_before><commit_msg>Add extrusion test for points and lines<commit_after># -*- coding: utf-8 -*-
"""Create several entities by extrusion, check that the expected
sub-entities are returned and the resulting mesh is correct.
"""
import pygmsh
import numpy as np
def test():
kernels = [pygmsh.built_in, pygmsh.opencascade]
for kernel in kernels:
geom = kernel.Geometry()
p = geom.add_point([0, 0, 0], 1)
p_top, _, _ = geom.extrude(p, translation_axis=[1, 0, 0])
# The mesh should now contain exactly two points,
# the second one should be where the translation pointed.
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 2
assert np.array_equal(points[-1], [1, 0, 0])
# Check that the top entity (a PointBase) can be extruded correctly
# again.
_, _, _ = geom.extrude(p_top, translation_axis=[1, 0, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 3
assert np.array_equal(points[-1], [2, 0, 0])
# Set up new geometry with one line.
geom = kernel.Geometry()
p1 = geom.add_point([0, 0, 0], 1)
p2 = geom.add_point([1, 0, 0], 1)
line = geom.add_line(p1, p2)
l_top, _, _ = geom.extrude(line, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 5
assert np.array_equal(points[-2], [1, 1, 0])
# Check again for top entity (a LineBase).
_, _, _ = geom.extrude(l_top, [0, 1, 0])
points, _, _, _, _ = pygmsh.generate_mesh(geom)
assert len(points) == 8
assert np.array_equal(points[-3], [1, 2, 0])
|
|
dd21cb62d08d5f293b04426afbdb256f386ee17f
|
test/ut/test_plugin_verbatim.py
|
test/ut/test_plugin_verbatim.py
|
#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.verbatim as verbatim
class UTTestPluginText(unittest.TestCase):
def test_processVerbatimTagXHTML(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "xhtml")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n#include <stdlib.h>\n#include <stdio.h>\n\nint main(int argc, char *argv[])\n{\n printf("Hello World!\n");\n}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
def test_processVerbatimTagLatex(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n\\#{}include <stdlib.h>\n\\#{}include <stdio.h>\n\nint main(int argc, char *argv{[}{]})\n\\{{}\n printf({}{"}{}Hello World{}{!}{}\n{}{"}{}){}{;}{}\n\\}{}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
Add test for verbatim plugin
|
Add test for verbatim plugin
|
Python
|
mit
|
tobijk/ecromedos,tobijk/ecromedos
|
Add test for verbatim plugin
|
#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.verbatim as verbatim
class UTTestPluginText(unittest.TestCase):
def test_processVerbatimTagXHTML(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "xhtml")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n#include <stdlib.h>\n#include <stdio.h>\n\nint main(int argc, char *argv[])\n{\n printf("Hello World!\n");\n}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
def test_processVerbatimTagLatex(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n\\#{}include <stdlib.h>\n\\#{}include <stdio.h>\n\nint main(int argc, char *argv{[}{]})\n\\{{}\n printf({}{"}{}Hello World{}{!}{}\n{}{"}{}){}{;}{}\n\\}{}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for verbatim plugin<commit_after>
|
#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.verbatim as verbatim
class UTTestPluginText(unittest.TestCase):
def test_processVerbatimTagXHTML(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "xhtml")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n#include <stdlib.h>\n#include <stdio.h>\n\nint main(int argc, char *argv[])\n{\n printf("Hello World!\n");\n}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
def test_processVerbatimTagLatex(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n\\#{}include <stdlib.h>\n\\#{}include <stdio.h>\n\nint main(int argc, char *argv{[}{]})\n\\{{}\n printf({}{"}{}Hello World{}{!}{}\n{}{"}{}){}{;}{}\n\\}{}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
Add test for verbatim plugin#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.verbatim as verbatim
class UTTestPluginText(unittest.TestCase):
def test_processVerbatimTagXHTML(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "xhtml")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n#include <stdlib.h>\n#include <stdio.h>\n\nint main(int argc, char *argv[])\n{\n printf("Hello World!\n");\n}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
def test_processVerbatimTagLatex(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n\\#{}include <stdlib.h>\n\\#{}include <stdio.h>\n\nint main(int argc, char *argv{[}{]})\n\\{{}\n printf({}{"}{}Hello World{}{!}{}\n{}{"}{}){}{;}{}\n\\}{}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for verbatim plugin<commit_after>#!/usr/bin/env python3
#-*- encoding: utf-8 -*-
import os, sys, tempfile, unittest
import lxml.etree as etree
ECMDS_INSTALL_DIR = os.path.normpath(os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])),
"..", ".."
))
sys.path.insert(1, ECMDS_INSTALL_DIR + os.sep + 'lib')
from net.ecromedos.error import ECMDSPluginError
import net.ecromedos.plugins.verbatim as verbatim
class UTTestPluginText(unittest.TestCase):
def test_processVerbatimTagXHTML(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "xhtml")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n#include <stdlib.h>\n#include <stdio.h>\n\nint main(int argc, char *argv[])\n{\n printf("Hello World!\n");\n}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
def test_processVerbatimTagLatex(self):
content = """
<root>
<verbatim><![CDATA[
#include <stdlib.h>
#include <stdio.h>
int main(int argc, char *argv[])
{
\tprintf("Hello World!\n");
}
]]></verbatim>
</root>
"""
root = etree.fromstring(content)
plugin = verbatim.getInstance({})
plugin.process(root.find('./verbatim'), "latex")
plugin.flush()
tree = etree.ElementTree(element=root)
result = etree.tostring(tree)
expected_result = b'<root>\n <verbatim>\n\\#{}include <stdlib.h>\n\\#{}include <stdio.h>\n\nint main(int argc, char *argv{[}{]})\n\\{{}\n printf({}{"}{}Hello World{}{!}{}\n{}{"}{}){}{;}{}\n\\}{}\n </verbatim>\n</root>'
self.assertEqual(result, expected_result)
#end function
#end class
if __name__ == "__main__":
unittest.main()
|
|
c1cbdb300bd18b2d376f51f8ed368f2e166fa798
|
gen/code.rs.py
|
gen/code.rs.py
|
import parser
def gen_header():
print "// This file was generated automatically."
print "// See the gen/ folder at the project root."
print
print "use std::fmt;"
print "use std::str;"
def gen_enum(codes):
print "#[derive(Clone, Copy, Debug, Eq, PartialEq)]"
print "pub enum Code {"
for code in codes:
print " " + code.format_code + ","
print "}"
def gen_methods(codes):
print "impl Code {"
print
print " pub fn is_reply(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.reply: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print " pub fn is_error(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.error: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print "}"
def gen_display(codes):
print "impl fmt::Display for Code {"
print
print " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {"
print " let text = match *self {"
for code in codes:
print " Code::" + code.format_code + " => " + code.format_value + ","
print " };"
print " f.write_str(text)"
print " }"
print
print "}"
def gen_fromstr(codes):
print "impl str::FromStr for Code {"
print " type Err = ();"
print
print " fn from_str(s: &str) -> Result<Code, ()> {"
print " let code = match s {"
for code in codes:
print " " + code.format_value + " => Code::" + code.format_code + ","
print " _ => return Err(()),"
print " };"
print " Ok(code)"
print " }"
print "}"
if __name__ == '__main__':
codes = parser.parse("codes.txt")
gen_header()
print
gen_enum(codes)
print
gen_methods(codes)
print
gen_display(codes)
print
gen_fromstr(codes)
print
|
Add a generator for the Code enum
|
Add a generator for the Code enum
|
Python
|
mit
|
celavy/irc,SBSTP/loirc,celavy/irc,SBSTP/loirc,SBSTP/loirc,celavy/irc
|
Add a generator for the Code enum
|
import parser
def gen_header():
print "// This file was generated automatically."
print "// See the gen/ folder at the project root."
print
print "use std::fmt;"
print "use std::str;"
def gen_enum(codes):
print "#[derive(Clone, Copy, Debug, Eq, PartialEq)]"
print "pub enum Code {"
for code in codes:
print " " + code.format_code + ","
print "}"
def gen_methods(codes):
print "impl Code {"
print
print " pub fn is_reply(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.reply: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print " pub fn is_error(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.error: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print "}"
def gen_display(codes):
print "impl fmt::Display for Code {"
print
print " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {"
print " let text = match *self {"
for code in codes:
print " Code::" + code.format_code + " => " + code.format_value + ","
print " };"
print " f.write_str(text)"
print " }"
print
print "}"
def gen_fromstr(codes):
print "impl str::FromStr for Code {"
print " type Err = ();"
print
print " fn from_str(s: &str) -> Result<Code, ()> {"
print " let code = match s {"
for code in codes:
print " " + code.format_value + " => Code::" + code.format_code + ","
print " _ => return Err(()),"
print " };"
print " Ok(code)"
print " }"
print "}"
if __name__ == '__main__':
codes = parser.parse("codes.txt")
gen_header()
print
gen_enum(codes)
print
gen_methods(codes)
print
gen_display(codes)
print
gen_fromstr(codes)
print
|
<commit_before><commit_msg>Add a generator for the Code enum<commit_after>
|
import parser
def gen_header():
print "// This file was generated automatically."
print "// See the gen/ folder at the project root."
print
print "use std::fmt;"
print "use std::str;"
def gen_enum(codes):
print "#[derive(Clone, Copy, Debug, Eq, PartialEq)]"
print "pub enum Code {"
for code in codes:
print " " + code.format_code + ","
print "}"
def gen_methods(codes):
print "impl Code {"
print
print " pub fn is_reply(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.reply: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print " pub fn is_error(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.error: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print "}"
def gen_display(codes):
print "impl fmt::Display for Code {"
print
print " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {"
print " let text = match *self {"
for code in codes:
print " Code::" + code.format_code + " => " + code.format_value + ","
print " };"
print " f.write_str(text)"
print " }"
print
print "}"
def gen_fromstr(codes):
print "impl str::FromStr for Code {"
print " type Err = ();"
print
print " fn from_str(s: &str) -> Result<Code, ()> {"
print " let code = match s {"
for code in codes:
print " " + code.format_value + " => Code::" + code.format_code + ","
print " _ => return Err(()),"
print " };"
print " Ok(code)"
print " }"
print "}"
if __name__ == '__main__':
codes = parser.parse("codes.txt")
gen_header()
print
gen_enum(codes)
print
gen_methods(codes)
print
gen_display(codes)
print
gen_fromstr(codes)
print
|
Add a generator for the Code enumimport parser
def gen_header():
print "// This file was generated automatically."
print "// See the gen/ folder at the project root."
print
print "use std::fmt;"
print "use std::str;"
def gen_enum(codes):
print "#[derive(Clone, Copy, Debug, Eq, PartialEq)]"
print "pub enum Code {"
for code in codes:
print " " + code.format_code + ","
print "}"
def gen_methods(codes):
print "impl Code {"
print
print " pub fn is_reply(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.reply: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print " pub fn is_error(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.error: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print "}"
def gen_display(codes):
print "impl fmt::Display for Code {"
print
print " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {"
print " let text = match *self {"
for code in codes:
print " Code::" + code.format_code + " => " + code.format_value + ","
print " };"
print " f.write_str(text)"
print " }"
print
print "}"
def gen_fromstr(codes):
print "impl str::FromStr for Code {"
print " type Err = ();"
print
print " fn from_str(s: &str) -> Result<Code, ()> {"
print " let code = match s {"
for code in codes:
print " " + code.format_value + " => Code::" + code.format_code + ","
print " _ => return Err(()),"
print " };"
print " Ok(code)"
print " }"
print "}"
if __name__ == '__main__':
codes = parser.parse("codes.txt")
gen_header()
print
gen_enum(codes)
print
gen_methods(codes)
print
gen_display(codes)
print
gen_fromstr(codes)
print
|
<commit_before><commit_msg>Add a generator for the Code enum<commit_after>import parser
def gen_header():
print "// This file was generated automatically."
print "// See the gen/ folder at the project root."
print
print "use std::fmt;"
print "use std::str;"
def gen_enum(codes):
print "#[derive(Clone, Copy, Debug, Eq, PartialEq)]"
print "pub enum Code {"
for code in codes:
print " " + code.format_code + ","
print "}"
def gen_methods(codes):
print "impl Code {"
print
print " pub fn is_reply(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.reply: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print " pub fn is_error(&self) -> bool {"
print " match *self {"
for code in codes:
if not code.error: continue
print " Code::" + code.format_code + " => true,"
print " _ => false,"
print " }"
print " }"
print
print "}"
def gen_display(codes):
print "impl fmt::Display for Code {"
print
print " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {"
print " let text = match *self {"
for code in codes:
print " Code::" + code.format_code + " => " + code.format_value + ","
print " };"
print " f.write_str(text)"
print " }"
print
print "}"
def gen_fromstr(codes):
print "impl str::FromStr for Code {"
print " type Err = ();"
print
print " fn from_str(s: &str) -> Result<Code, ()> {"
print " let code = match s {"
for code in codes:
print " " + code.format_value + " => Code::" + code.format_code + ","
print " _ => return Err(()),"
print " };"
print " Ok(code)"
print " }"
print "}"
if __name__ == '__main__':
codes = parser.parse("codes.txt")
gen_header()
print
gen_enum(codes)
print
gen_methods(codes)
print
gen_display(codes)
print
gen_fromstr(codes)
print
|
|
5b139fce10c1451e3d7df2f73fe63cebfd983fdb
|
host-test/host-sample.py
|
host-test/host-sample.py
|
#!/usr/local/bin/python
import sys
import struct
import json
def send(msg):
sys.stdout.write(struct.pack("=I", len(msg)))
sys.stdout.write(msg)
sys.stdout.flush()
def invalid_argument(msg):
send(json.dumps({'result': 'invalid_argument', 'message': msg}))
def fail(msg):
invalid_argument(msg)
sys.exit(1)
def respond(req, resp):
resp["nonce"] = req["nonce"]
if "result" not in resp:
resp["result"] = "ok"
send(json.dumps(resp))
if __name__ == "__main__":
while True:
try:
# read input length
lenbytes = sys.stdin.read(4)
length = struct.unpack("=I", lenbytes)[0]
if length == 0 or length > 8 *1024:
fail("Bad length")
# read input
req = json.loads(sys.stdin.read(length))
# required fields
if not all (k in req for k in ("nonce","origin","type")):
fail("Required fields missing")
# process messages
if req["type"] == "VERSION":
respond(req, {"version": "0.0.34"})
elif req["type"] == "CERT":
if not "lang" in req:
invalid_argument("lang missing")
else:
respond(req, {"cert": "00112233445566778899"})
elif req["type"] == "SIGN":
if not all (k in req for k in ("hash","cert","lang")):
invalid_argument("hash or cert or lang missing")
else:
respond(req, {"signature": "00112233445566778899"})
else:
invalid_argument("unknown type")
except Exception as e:
print >> sys.stderr, "Exception", e
fail("Unhandled exception")
|
Add a sample mock host implementation in Python
|
Add a sample mock host implementation in Python
|
Python
|
lgpl-2.1
|
cristiano-andrade/chrome-token-signing,metsma/chrome-token-signing,fabiorusso/chrome-token-signing,open-eid/chrome-token-signing,metsma/chrome-token-signing,open-eid/chrome-token-signing,cristiano-andrade/chrome-token-signing,open-eid/chrome-token-signing,fabiorusso/chrome-token-signing,metsma/chrome-token-signing,fabiorusso/chrome-token-signing,metsma/chrome-token-signing,cristiano-andrade/chrome-token-signing,open-eid/chrome-token-signing,open-eid/chrome-token-signing,metsma/chrome-token-signing,fabiorusso/chrome-token-signing,cristiano-andrade/chrome-token-signing
|
Add a sample mock host implementation in Python
|
#!/usr/local/bin/python
import sys
import struct
import json
def send(msg):
sys.stdout.write(struct.pack("=I", len(msg)))
sys.stdout.write(msg)
sys.stdout.flush()
def invalid_argument(msg):
send(json.dumps({'result': 'invalid_argument', 'message': msg}))
def fail(msg):
invalid_argument(msg)
sys.exit(1)
def respond(req, resp):
resp["nonce"] = req["nonce"]
if "result" not in resp:
resp["result"] = "ok"
send(json.dumps(resp))
if __name__ == "__main__":
while True:
try:
# read input length
lenbytes = sys.stdin.read(4)
length = struct.unpack("=I", lenbytes)[0]
if length == 0 or length > 8 *1024:
fail("Bad length")
# read input
req = json.loads(sys.stdin.read(length))
# required fields
if not all (k in req for k in ("nonce","origin","type")):
fail("Required fields missing")
# process messages
if req["type"] == "VERSION":
respond(req, {"version": "0.0.34"})
elif req["type"] == "CERT":
if not "lang" in req:
invalid_argument("lang missing")
else:
respond(req, {"cert": "00112233445566778899"})
elif req["type"] == "SIGN":
if not all (k in req for k in ("hash","cert","lang")):
invalid_argument("hash or cert or lang missing")
else:
respond(req, {"signature": "00112233445566778899"})
else:
invalid_argument("unknown type")
except Exception as e:
print >> sys.stderr, "Exception", e
fail("Unhandled exception")
|
<commit_before><commit_msg>Add a sample mock host implementation in Python<commit_after>
|
#!/usr/local/bin/python
import sys
import struct
import json
def send(msg):
sys.stdout.write(struct.pack("=I", len(msg)))
sys.stdout.write(msg)
sys.stdout.flush()
def invalid_argument(msg):
send(json.dumps({'result': 'invalid_argument', 'message': msg}))
def fail(msg):
invalid_argument(msg)
sys.exit(1)
def respond(req, resp):
resp["nonce"] = req["nonce"]
if "result" not in resp:
resp["result"] = "ok"
send(json.dumps(resp))
if __name__ == "__main__":
while True:
try:
# read input length
lenbytes = sys.stdin.read(4)
length = struct.unpack("=I", lenbytes)[0]
if length == 0 or length > 8 *1024:
fail("Bad length")
# read input
req = json.loads(sys.stdin.read(length))
# required fields
if not all (k in req for k in ("nonce","origin","type")):
fail("Required fields missing")
# process messages
if req["type"] == "VERSION":
respond(req, {"version": "0.0.34"})
elif req["type"] == "CERT":
if not "lang" in req:
invalid_argument("lang missing")
else:
respond(req, {"cert": "00112233445566778899"})
elif req["type"] == "SIGN":
if not all (k in req for k in ("hash","cert","lang")):
invalid_argument("hash or cert or lang missing")
else:
respond(req, {"signature": "00112233445566778899"})
else:
invalid_argument("unknown type")
except Exception as e:
print >> sys.stderr, "Exception", e
fail("Unhandled exception")
|
Add a sample mock host implementation in Python#!/usr/local/bin/python
import sys
import struct
import json
def send(msg):
sys.stdout.write(struct.pack("=I", len(msg)))
sys.stdout.write(msg)
sys.stdout.flush()
def invalid_argument(msg):
send(json.dumps({'result': 'invalid_argument', 'message': msg}))
def fail(msg):
invalid_argument(msg)
sys.exit(1)
def respond(req, resp):
resp["nonce"] = req["nonce"]
if "result" not in resp:
resp["result"] = "ok"
send(json.dumps(resp))
if __name__ == "__main__":
while True:
try:
# read input length
lenbytes = sys.stdin.read(4)
length = struct.unpack("=I", lenbytes)[0]
if length == 0 or length > 8 *1024:
fail("Bad length")
# read input
req = json.loads(sys.stdin.read(length))
# required fields
if not all (k in req for k in ("nonce","origin","type")):
fail("Required fields missing")
# process messages
if req["type"] == "VERSION":
respond(req, {"version": "0.0.34"})
elif req["type"] == "CERT":
if not "lang" in req:
invalid_argument("lang missing")
else:
respond(req, {"cert": "00112233445566778899"})
elif req["type"] == "SIGN":
if not all (k in req for k in ("hash","cert","lang")):
invalid_argument("hash or cert or lang missing")
else:
respond(req, {"signature": "00112233445566778899"})
else:
invalid_argument("unknown type")
except Exception as e:
print >> sys.stderr, "Exception", e
fail("Unhandled exception")
|
<commit_before><commit_msg>Add a sample mock host implementation in Python<commit_after>#!/usr/local/bin/python
import sys
import struct
import json
def send(msg):
sys.stdout.write(struct.pack("=I", len(msg)))
sys.stdout.write(msg)
sys.stdout.flush()
def invalid_argument(msg):
send(json.dumps({'result': 'invalid_argument', 'message': msg}))
def fail(msg):
invalid_argument(msg)
sys.exit(1)
def respond(req, resp):
resp["nonce"] = req["nonce"]
if "result" not in resp:
resp["result"] = "ok"
send(json.dumps(resp))
if __name__ == "__main__":
while True:
try:
# read input length
lenbytes = sys.stdin.read(4)
length = struct.unpack("=I", lenbytes)[0]
if length == 0 or length > 8 *1024:
fail("Bad length")
# read input
req = json.loads(sys.stdin.read(length))
# required fields
if not all (k in req for k in ("nonce","origin","type")):
fail("Required fields missing")
# process messages
if req["type"] == "VERSION":
respond(req, {"version": "0.0.34"})
elif req["type"] == "CERT":
if not "lang" in req:
invalid_argument("lang missing")
else:
respond(req, {"cert": "00112233445566778899"})
elif req["type"] == "SIGN":
if not all (k in req for k in ("hash","cert","lang")):
invalid_argument("hash or cert or lang missing")
else:
respond(req, {"signature": "00112233445566778899"})
else:
invalid_argument("unknown type")
except Exception as e:
print >> sys.stderr, "Exception", e
fail("Unhandled exception")
|
|
2b45fc6a3e1c5b98bceece7917de9699036b77d7
|
hbmqtt/utils.py
|
hbmqtt/utils.py
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
def bytes_to_hex_str(data):
return '0x' + ''.join(format(b, '02x') for b in data)
def hex_to_int(data):
return int.from_bytes(data, byteorder='big')
|
Add some byte conversion utilities
|
Add some byte conversion utilities
|
Python
|
mit
|
beerfactory/hbmqtt
|
Add some byte conversion utilities
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
def bytes_to_hex_str(data):
return '0x' + ''.join(format(b, '02x') for b in data)
def hex_to_int(data):
return int.from_bytes(data, byteorder='big')
|
<commit_before><commit_msg>Add some byte conversion utilities<commit_after>
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
def bytes_to_hex_str(data):
return '0x' + ''.join(format(b, '02x') for b in data)
def hex_to_int(data):
return int.from_bytes(data, byteorder='big')
|
Add some byte conversion utilities# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
def bytes_to_hex_str(data):
return '0x' + ''.join(format(b, '02x') for b in data)
def hex_to_int(data):
return int.from_bytes(data, byteorder='big')
|
<commit_before><commit_msg>Add some byte conversion utilities<commit_after># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
def bytes_to_hex_str(data):
return '0x' + ''.join(format(b, '02x') for b in data)
def hex_to_int(data):
return int.from_bytes(data, byteorder='big')
|
|
5c479bbb873bd200dda12c292e51612a8e9f26ed
|
examples/plot_data.py
|
examples/plot_data.py
|
import hexgrid
import numpy as np
import matplotlib.pyplot as plt
points = hexgrid.create_spiral(hexgrid.HexPoints(0, 0, 0), 4)
points.data['id'] = np.arange(len(points))
points.data['random'] = np.random.normal(size=len(points))
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(9, 3)
for ax in (ax1, ax2):
ax.set_axis_off()
ax.set_xlabel('')
ax.set_ylabel('')
p1 = hexgrid.plot_hexagons(points, key='id', ax=ax1)
fig.colorbar(p1, ax=ax1)
p2 = hexgrid.plot_hexagons(points, key='random', ax=ax2)
fig.colorbar(p2, ax=ax2)
fig.tight_layout()
plt.show()
|
Add example for plotting data
|
Add example for plotting data
|
Python
|
mit
|
MaxNoe/pyhexgrid
|
Add example for plotting data
|
import hexgrid
import numpy as np
import matplotlib.pyplot as plt
points = hexgrid.create_spiral(hexgrid.HexPoints(0, 0, 0), 4)
points.data['id'] = np.arange(len(points))
points.data['random'] = np.random.normal(size=len(points))
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(9, 3)
for ax in (ax1, ax2):
ax.set_axis_off()
ax.set_xlabel('')
ax.set_ylabel('')
p1 = hexgrid.plot_hexagons(points, key='id', ax=ax1)
fig.colorbar(p1, ax=ax1)
p2 = hexgrid.plot_hexagons(points, key='random', ax=ax2)
fig.colorbar(p2, ax=ax2)
fig.tight_layout()
plt.show()
|
<commit_before><commit_msg>Add example for plotting data<commit_after>
|
import hexgrid
import numpy as np
import matplotlib.pyplot as plt
points = hexgrid.create_spiral(hexgrid.HexPoints(0, 0, 0), 4)
points.data['id'] = np.arange(len(points))
points.data['random'] = np.random.normal(size=len(points))
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(9, 3)
for ax in (ax1, ax2):
ax.set_axis_off()
ax.set_xlabel('')
ax.set_ylabel('')
p1 = hexgrid.plot_hexagons(points, key='id', ax=ax1)
fig.colorbar(p1, ax=ax1)
p2 = hexgrid.plot_hexagons(points, key='random', ax=ax2)
fig.colorbar(p2, ax=ax2)
fig.tight_layout()
plt.show()
|
Add example for plotting dataimport hexgrid
import numpy as np
import matplotlib.pyplot as plt
points = hexgrid.create_spiral(hexgrid.HexPoints(0, 0, 0), 4)
points.data['id'] = np.arange(len(points))
points.data['random'] = np.random.normal(size=len(points))
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(9, 3)
for ax in (ax1, ax2):
ax.set_axis_off()
ax.set_xlabel('')
ax.set_ylabel('')
p1 = hexgrid.plot_hexagons(points, key='id', ax=ax1)
fig.colorbar(p1, ax=ax1)
p2 = hexgrid.plot_hexagons(points, key='random', ax=ax2)
fig.colorbar(p2, ax=ax2)
fig.tight_layout()
plt.show()
|
<commit_before><commit_msg>Add example for plotting data<commit_after>import hexgrid
import numpy as np
import matplotlib.pyplot as plt
points = hexgrid.create_spiral(hexgrid.HexPoints(0, 0, 0), 4)
points.data['id'] = np.arange(len(points))
points.data['random'] = np.random.normal(size=len(points))
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(9, 3)
for ax in (ax1, ax2):
ax.set_axis_off()
ax.set_xlabel('')
ax.set_ylabel('')
p1 = hexgrid.plot_hexagons(points, key='id', ax=ax1)
fig.colorbar(p1, ax=ax1)
p2 = hexgrid.plot_hexagons(points, key='random', ax=ax2)
fig.colorbar(p2, ax=ax2)
fig.tight_layout()
plt.show()
|
|
6962ba127dc2e4551901131426395fcfb3f53cee
|
extra/docker_tools.py
|
extra/docker_tools.py
|
import subprocess
import sys
def task_docker_ready():
"""Check that docker can be executed inside the container."""
def _docker_ready():
try:
subprocess.check_call('docker ps >/dev/null 2>&1', shell=True)
return True
except subprocess.CalledProcessError:
print('Docker not ready!')
print("Check the 'alias doit_in_docker' to see if the docker socket"
' matches the location in your system.')
sys.exit(1)
return {
'uptodate': [_docker_ready],
'actions': ['touch .docker_ready'],
'targets': ['.docker_ready'],
}
def _image_exist(img, cmd='inspect'):
try:
subprocess.check_output(
'docker {cmd} {img} 1>/dev/null 2>&1'.format(cmd=cmd, img=img),
shell=True)
return True
except subprocess.CalledProcessError:
return False
def images_exist(*imgs):
"""Check if the images exist in local index"""
return all(_image_exist(img) for img in imgs)
def remote_images_exist(*imgs):
"""Check if the images exist in remote registry"""
return all(_image_exist(img, cmd='pull') for img in imgs)
|
Add a module with tools for using docker from doit
|
Add a module with tools for using docker from doit
|
Python
|
mit
|
waltermoreira/doit_in_docker,waltermoreira/doit_in_docker
|
Add a module with tools for using docker from doit
|
import subprocess
import sys
def task_docker_ready():
"""Check that docker can be executed inside the container."""
def _docker_ready():
try:
subprocess.check_call('docker ps >/dev/null 2>&1', shell=True)
return True
except subprocess.CalledProcessError:
print('Docker not ready!')
print("Check the 'alias doit_in_docker' to see if the docker socket"
' matches the location in your system.')
sys.exit(1)
return {
'uptodate': [_docker_ready],
'actions': ['touch .docker_ready'],
'targets': ['.docker_ready'],
}
def _image_exist(img, cmd='inspect'):
try:
subprocess.check_output(
'docker {cmd} {img} 1>/dev/null 2>&1'.format(cmd=cmd, img=img),
shell=True)
return True
except subprocess.CalledProcessError:
return False
def images_exist(*imgs):
"""Check if the images exist in local index"""
return all(_image_exist(img) for img in imgs)
def remote_images_exist(*imgs):
"""Check if the images exist in remote registry"""
return all(_image_exist(img, cmd='pull') for img in imgs)
|
<commit_before><commit_msg>Add a module with tools for using docker from doit<commit_after>
|
import subprocess
import sys
def task_docker_ready():
"""Check that docker can be executed inside the container."""
def _docker_ready():
try:
subprocess.check_call('docker ps >/dev/null 2>&1', shell=True)
return True
except subprocess.CalledProcessError:
print('Docker not ready!')
print("Check the 'alias doit_in_docker' to see if the docker socket"
' matches the location in your system.')
sys.exit(1)
return {
'uptodate': [_docker_ready],
'actions': ['touch .docker_ready'],
'targets': ['.docker_ready'],
}
def _image_exist(img, cmd='inspect'):
try:
subprocess.check_output(
'docker {cmd} {img} 1>/dev/null 2>&1'.format(cmd=cmd, img=img),
shell=True)
return True
except subprocess.CalledProcessError:
return False
def images_exist(*imgs):
"""Check if the images exist in local index"""
return all(_image_exist(img) for img in imgs)
def remote_images_exist(*imgs):
"""Check if the images exist in remote registry"""
return all(_image_exist(img, cmd='pull') for img in imgs)
|
Add a module with tools for using docker from doitimport subprocess
import sys
def task_docker_ready():
"""Check that docker can be executed inside the container."""
def _docker_ready():
try:
subprocess.check_call('docker ps >/dev/null 2>&1', shell=True)
return True
except subprocess.CalledProcessError:
print('Docker not ready!')
print("Check the 'alias doit_in_docker' to see if the docker socket"
' matches the location in your system.')
sys.exit(1)
return {
'uptodate': [_docker_ready],
'actions': ['touch .docker_ready'],
'targets': ['.docker_ready'],
}
def _image_exist(img, cmd='inspect'):
try:
subprocess.check_output(
'docker {cmd} {img} 1>/dev/null 2>&1'.format(cmd=cmd, img=img),
shell=True)
return True
except subprocess.CalledProcessError:
return False
def images_exist(*imgs):
"""Check if the images exist in local index"""
return all(_image_exist(img) for img in imgs)
def remote_images_exist(*imgs):
"""Check if the images exist in remote registry"""
return all(_image_exist(img, cmd='pull') for img in imgs)
|
<commit_before><commit_msg>Add a module with tools for using docker from doit<commit_after>import subprocess
import sys
def task_docker_ready():
"""Check that docker can be executed inside the container."""
def _docker_ready():
try:
subprocess.check_call('docker ps >/dev/null 2>&1', shell=True)
return True
except subprocess.CalledProcessError:
print('Docker not ready!')
print("Check the 'alias doit_in_docker' to see if the docker socket"
' matches the location in your system.')
sys.exit(1)
return {
'uptodate': [_docker_ready],
'actions': ['touch .docker_ready'],
'targets': ['.docker_ready'],
}
def _image_exist(img, cmd='inspect'):
try:
subprocess.check_output(
'docker {cmd} {img} 1>/dev/null 2>&1'.format(cmd=cmd, img=img),
shell=True)
return True
except subprocess.CalledProcessError:
return False
def images_exist(*imgs):
"""Check if the images exist in local index"""
return all(_image_exist(img) for img in imgs)
def remote_images_exist(*imgs):
"""Check if the images exist in remote registry"""
return all(_image_exist(img, cmd='pull') for img in imgs)
|
|
b5700995316d999cae9f2284a21d1f373d419b21
|
euler016.py
|
euler016.py
|
#!/usr/bin/python
from math import pow
x = int(pow(2, 1000))
a = str(x)
res = 0
for i in range(len(a)):
res += int(a[i])
print(res)
|
Add solution for problem 16
|
Add solution for problem 16
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 16
|
#!/usr/bin/python
from math import pow
x = int(pow(2, 1000))
a = str(x)
res = 0
for i in range(len(a)):
res += int(a[i])
print(res)
|
<commit_before><commit_msg>Add solution for problem 16<commit_after>
|
#!/usr/bin/python
from math import pow
x = int(pow(2, 1000))
a = str(x)
res = 0
for i in range(len(a)):
res += int(a[i])
print(res)
|
Add solution for problem 16#!/usr/bin/python
from math import pow
x = int(pow(2, 1000))
a = str(x)
res = 0
for i in range(len(a)):
res += int(a[i])
print(res)
|
<commit_before><commit_msg>Add solution for problem 16<commit_after>#!/usr/bin/python
from math import pow
x = int(pow(2, 1000))
a = str(x)
res = 0
for i in range(len(a)):
res += int(a[i])
print(res)
|
|
b5ff1b9f24214d6508ebf58a676b6df10e51a655
|
euler033.py
|
euler033.py
|
#!/usr/bin/python
from fractions import gcd
num = 1
den = 1
for c in range(1, 10):
for b in range(1, c):
for a in range(1, b):
x = a * 10 + c
y = c * 10 + b
if (a / b) == (x / y):
num *= a
den *= b
print(den // gcd(den, num))
|
Add solution for problem 33, after long time...
|
Add solution for problem 33, after long time...
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 33, after long time...
|
#!/usr/bin/python
from fractions import gcd
num = 1
den = 1
for c in range(1, 10):
for b in range(1, c):
for a in range(1, b):
x = a * 10 + c
y = c * 10 + b
if (a / b) == (x / y):
num *= a
den *= b
print(den // gcd(den, num))
|
<commit_before><commit_msg>Add solution for problem 33, after long time...<commit_after>
|
#!/usr/bin/python
from fractions import gcd
num = 1
den = 1
for c in range(1, 10):
for b in range(1, c):
for a in range(1, b):
x = a * 10 + c
y = c * 10 + b
if (a / b) == (x / y):
num *= a
den *= b
print(den // gcd(den, num))
|
Add solution for problem 33, after long time...#!/usr/bin/python
from fractions import gcd
num = 1
den = 1
for c in range(1, 10):
for b in range(1, c):
for a in range(1, b):
x = a * 10 + c
y = c * 10 + b
if (a / b) == (x / y):
num *= a
den *= b
print(den // gcd(den, num))
|
<commit_before><commit_msg>Add solution for problem 33, after long time...<commit_after>#!/usr/bin/python
from fractions import gcd
num = 1
den = 1
for c in range(1, 10):
for b in range(1, c):
for a in range(1, b):
x = a * 10 + c
y = c * 10 + b
if (a / b) == (x / y):
num *= a
den *= b
print(den // gcd(den, num))
|
|
af3222f934a812b5f4a57d3c86f8ca3604f956cc
|
python/ranks.py
|
python/ranks.py
|
"""
Rank Vectors
Given an array (or list) of scores, return the array of ranks for each value in the array. The largest value has rank 1, the second largest value has rank 2, and so on. Ties should be handled by assigning the same rank to all tied values. For example:
ranks([9,3,6,10]) = [2,4,3,1] and ranks([3,3,3,3,3,5,1]) = [2,2,2,2,2,1,7]
ranks([8, 98, 10, 3, 3, 4, 4, 89]) # [4, 1, 3, 7, 7, 5, 5, 2]
"""
def ranks(scores):
""" Slick solution using the offset in the sorted list of the first occurence
to determine the rank.
"""
sorted_scores = sorted(scores, reverse=True)
rank_list = [sorted_scores.index(n) + 1 for n in scores]
return rank_list
if __name__ == '__main__':
instr = raw_input('Enter comma-separated numbers (e.g. 3,5,2)')
scores = [int(n) for n in instr.split(',')]
print 'Ranking {0}'.format(scores)
ret = ranks(scores)
print 'Ranks: {0}'.format(ret)
|
Add score ranking problem and solution
|
Add score ranking problem and solution
|
Python
|
unlicense
|
alanc10n/snippets
|
Add score ranking problem and solution
|
"""
Rank Vectors
Given an array (or list) of scores, return the array of ranks for each value in the array. The largest value has rank 1, the second largest value has rank 2, and so on. Ties should be handled by assigning the same rank to all tied values. For example:
ranks([9,3,6,10]) = [2,4,3,1] and ranks([3,3,3,3,3,5,1]) = [2,2,2,2,2,1,7]
ranks([8, 98, 10, 3, 3, 4, 4, 89]) # [4, 1, 3, 7, 7, 5, 5, 2]
"""
def ranks(scores):
""" Slick solution using the offset in the sorted list of the first occurence
to determine the rank.
"""
sorted_scores = sorted(scores, reverse=True)
rank_list = [sorted_scores.index(n) + 1 for n in scores]
return rank_list
if __name__ == '__main__':
instr = raw_input('Enter comma-separated numbers (e.g. 3,5,2)')
scores = [int(n) for n in instr.split(',')]
print 'Ranking {0}'.format(scores)
ret = ranks(scores)
print 'Ranks: {0}'.format(ret)
|
<commit_before><commit_msg>Add score ranking problem and solution<commit_after>
|
"""
Rank Vectors
Given an array (or list) of scores, return the array of ranks for each value in the array. The largest value has rank 1, the second largest value has rank 2, and so on. Ties should be handled by assigning the same rank to all tied values. For example:
ranks([9,3,6,10]) = [2,4,3,1] and ranks([3,3,3,3,3,5,1]) = [2,2,2,2,2,1,7]
ranks([8, 98, 10, 3, 3, 4, 4, 89]) # [4, 1, 3, 7, 7, 5, 5, 2]
"""
def ranks(scores):
""" Slick solution using the offset in the sorted list of the first occurence
to determine the rank.
"""
sorted_scores = sorted(scores, reverse=True)
rank_list = [sorted_scores.index(n) + 1 for n in scores]
return rank_list
if __name__ == '__main__':
instr = raw_input('Enter comma-separated numbers (e.g. 3,5,2)')
scores = [int(n) for n in instr.split(',')]
print 'Ranking {0}'.format(scores)
ret = ranks(scores)
print 'Ranks: {0}'.format(ret)
|
Add score ranking problem and solution"""
Rank Vectors
Given an array (or list) of scores, return the array of ranks for each value in the array. The largest value has rank 1, the second largest value has rank 2, and so on. Ties should be handled by assigning the same rank to all tied values. For example:
ranks([9,3,6,10]) = [2,4,3,1] and ranks([3,3,3,3,3,5,1]) = [2,2,2,2,2,1,7]
ranks([8, 98, 10, 3, 3, 4, 4, 89]) # [4, 1, 3, 7, 7, 5, 5, 2]
"""
def ranks(scores):
""" Slick solution using the offset in the sorted list of the first occurence
to determine the rank.
"""
sorted_scores = sorted(scores, reverse=True)
rank_list = [sorted_scores.index(n) + 1 for n in scores]
return rank_list
if __name__ == '__main__':
instr = raw_input('Enter comma-separated numbers (e.g. 3,5,2)')
scores = [int(n) for n in instr.split(',')]
print 'Ranking {0}'.format(scores)
ret = ranks(scores)
print 'Ranks: {0}'.format(ret)
|
<commit_before><commit_msg>Add score ranking problem and solution<commit_after>"""
Rank Vectors
Given an array (or list) of scores, return the array of ranks for each value in the array. The largest value has rank 1, the second largest value has rank 2, and so on. Ties should be handled by assigning the same rank to all tied values. For example:
ranks([9,3,6,10]) = [2,4,3,1] and ranks([3,3,3,3,3,5,1]) = [2,2,2,2,2,1,7]
ranks([8, 98, 10, 3, 3, 4, 4, 89]) # [4, 1, 3, 7, 7, 5, 5, 2]
"""
def ranks(scores):
""" Slick solution using the offset in the sorted list of the first occurence
to determine the rank.
"""
sorted_scores = sorted(scores, reverse=True)
rank_list = [sorted_scores.index(n) + 1 for n in scores]
return rank_list
if __name__ == '__main__':
instr = raw_input('Enter comma-separated numbers (e.g. 3,5,2)')
scores = [int(n) for n in instr.split(',')]
print 'Ranking {0}'.format(scores)
ret = ranks(scores)
print 'Ranks: {0}'.format(ret)
|
|
4a17b636fc405e61be19ed21c44b37fdd5371aa2
|
django_vend/stores/forms.py
|
django_vend/stores/forms.py
|
from django import forms
from django_vend.core.forms import VendDateTimeField
from .models import VendOutlet
class VendOutletForm(forms.ModelForm):
deleted_at = VendDateTimeField(required=False)
def __init__(self, data=None, *args, **kwargs):
if data:
uid = data.pop('id', None)
if uid is not None:
data['uid'] = uid
tax_inc = data.pop('display_prices', None)
if tax_inc is not None:
if tax_inc == 'inclusive':
data['display_prices_tax_inclusive'] = True
elif tax_inc == 'exclusive':
data['display_prices_tax_inclusive'] = False
deleted_at = data.get('deleted_at')
if deleted_at is not None and deleted_at == 'null':
data['deleted_at'] = None
super(VendOutletForm, self).__init__(data, *args, **kwargs)
class Meta:
model = VendOutlet
fields = ['uid', 'name', 'time_zone', 'currency', 'currency_symbol',
'display_prices_tax_inclusive', 'deleted_at']
|
Create VendOutletForm to use for validating data received from Vend API
|
Create VendOutletForm to use for validating data received from Vend API
|
Python
|
bsd-3-clause
|
remarkablerocket/django-vend,remarkablerocket/django-vend
|
Create VendOutletForm to use for validating data received from Vend API
|
from django import forms
from django_vend.core.forms import VendDateTimeField
from .models import VendOutlet
class VendOutletForm(forms.ModelForm):
deleted_at = VendDateTimeField(required=False)
def __init__(self, data=None, *args, **kwargs):
if data:
uid = data.pop('id', None)
if uid is not None:
data['uid'] = uid
tax_inc = data.pop('display_prices', None)
if tax_inc is not None:
if tax_inc == 'inclusive':
data['display_prices_tax_inclusive'] = True
elif tax_inc == 'exclusive':
data['display_prices_tax_inclusive'] = False
deleted_at = data.get('deleted_at')
if deleted_at is not None and deleted_at == 'null':
data['deleted_at'] = None
super(VendOutletForm, self).__init__(data, *args, **kwargs)
class Meta:
model = VendOutlet
fields = ['uid', 'name', 'time_zone', 'currency', 'currency_symbol',
'display_prices_tax_inclusive', 'deleted_at']
|
<commit_before><commit_msg>Create VendOutletForm to use for validating data received from Vend API<commit_after>
|
from django import forms
from django_vend.core.forms import VendDateTimeField
from .models import VendOutlet
class VendOutletForm(forms.ModelForm):
deleted_at = VendDateTimeField(required=False)
def __init__(self, data=None, *args, **kwargs):
if data:
uid = data.pop('id', None)
if uid is not None:
data['uid'] = uid
tax_inc = data.pop('display_prices', None)
if tax_inc is not None:
if tax_inc == 'inclusive':
data['display_prices_tax_inclusive'] = True
elif tax_inc == 'exclusive':
data['display_prices_tax_inclusive'] = False
deleted_at = data.get('deleted_at')
if deleted_at is not None and deleted_at == 'null':
data['deleted_at'] = None
super(VendOutletForm, self).__init__(data, *args, **kwargs)
class Meta:
model = VendOutlet
fields = ['uid', 'name', 'time_zone', 'currency', 'currency_symbol',
'display_prices_tax_inclusive', 'deleted_at']
|
Create VendOutletForm to use for validating data received from Vend APIfrom django import forms
from django_vend.core.forms import VendDateTimeField
from .models import VendOutlet
class VendOutletForm(forms.ModelForm):
deleted_at = VendDateTimeField(required=False)
def __init__(self, data=None, *args, **kwargs):
if data:
uid = data.pop('id', None)
if uid is not None:
data['uid'] = uid
tax_inc = data.pop('display_prices', None)
if tax_inc is not None:
if tax_inc == 'inclusive':
data['display_prices_tax_inclusive'] = True
elif tax_inc == 'exclusive':
data['display_prices_tax_inclusive'] = False
deleted_at = data.get('deleted_at')
if deleted_at is not None and deleted_at == 'null':
data['deleted_at'] = None
super(VendOutletForm, self).__init__(data, *args, **kwargs)
class Meta:
model = VendOutlet
fields = ['uid', 'name', 'time_zone', 'currency', 'currency_symbol',
'display_prices_tax_inclusive', 'deleted_at']
|
<commit_before><commit_msg>Create VendOutletForm to use for validating data received from Vend API<commit_after>from django import forms
from django_vend.core.forms import VendDateTimeField
from .models import VendOutlet
class VendOutletForm(forms.ModelForm):
deleted_at = VendDateTimeField(required=False)
def __init__(self, data=None, *args, **kwargs):
if data:
uid = data.pop('id', None)
if uid is not None:
data['uid'] = uid
tax_inc = data.pop('display_prices', None)
if tax_inc is not None:
if tax_inc == 'inclusive':
data['display_prices_tax_inclusive'] = True
elif tax_inc == 'exclusive':
data['display_prices_tax_inclusive'] = False
deleted_at = data.get('deleted_at')
if deleted_at is not None and deleted_at == 'null':
data['deleted_at'] = None
super(VendOutletForm, self).__init__(data, *args, **kwargs)
class Meta:
model = VendOutlet
fields = ['uid', 'name', 'time_zone', 'currency', 'currency_symbol',
'display_prices_tax_inclusive', 'deleted_at']
|
|
53b8d4089299e85caef8b65aaa3b074acb243ccb
|
Next_Greater_Element_I.py
|
Next_Greater_Element_I.py
|
# You are given two arrays (without duplicates) nums1 and nums2 where nums1’s elements are subset of nums2. Find all the next greater numbers for nums1's elements in the corresponding places of nums2.
# The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist, output -1 for this number.
# Exampe:
# Input: nums1 = [4,1,2], nums2 = [1,3,4,2].
# Output: [-1,3,-1]
# Explanation:
# For number 4 in the first array, you cannot find the next greater number for it in the second array, so output -1.
# For number 1 in the first array, the next greater number for it in the second array is 3.
# For number 2 in the first array, there is no next greater number for it in the second array, so output -1.
def nextGreaterElement(findNums, nums):
"""
:type findNums: List[int]
:type nums: List[int]
:rtype: List[int]
"""
greaterNumbers = []
for i in range(len(findNums)):
index = nums.index(findNums[i])
for x in range(index, len(nums)):
if nums[x] > findNums[i]:
greaterNumbers.append(nums[x])
break
elif x == (len(nums) - 1):
greaterNumbers.append(-1)
return greaterNumbers
findNums = [4,1,2]
nums = [1,2,3,4]
print(nextGreaterElement(findNums, nums))
|
Solve Next Greater Element I
|
Solve Next Greater Element I
|
Python
|
mit
|
Kunal57/Python_Algorithms
|
Solve Next Greater Element I
|
# You are given two arrays (without duplicates) nums1 and nums2 where nums1’s elements are subset of nums2. Find all the next greater numbers for nums1's elements in the corresponding places of nums2.
# The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist, output -1 for this number.
# Exampe:
# Input: nums1 = [4,1,2], nums2 = [1,3,4,2].
# Output: [-1,3,-1]
# Explanation:
# For number 4 in the first array, you cannot find the next greater number for it in the second array, so output -1.
# For number 1 in the first array, the next greater number for it in the second array is 3.
# For number 2 in the first array, there is no next greater number for it in the second array, so output -1.
def nextGreaterElement(findNums, nums):
"""
:type findNums: List[int]
:type nums: List[int]
:rtype: List[int]
"""
greaterNumbers = []
for i in range(len(findNums)):
index = nums.index(findNums[i])
for x in range(index, len(nums)):
if nums[x] > findNums[i]:
greaterNumbers.append(nums[x])
break
elif x == (len(nums) - 1):
greaterNumbers.append(-1)
return greaterNumbers
findNums = [4,1,2]
nums = [1,2,3,4]
print(nextGreaterElement(findNums, nums))
|
<commit_before><commit_msg>Solve Next Greater Element I<commit_after>
|
# You are given two arrays (without duplicates) nums1 and nums2 where nums1’s elements are subset of nums2. Find all the next greater numbers for nums1's elements in the corresponding places of nums2.
# The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist, output -1 for this number.
# Exampe:
# Input: nums1 = [4,1,2], nums2 = [1,3,4,2].
# Output: [-1,3,-1]
# Explanation:
# For number 4 in the first array, you cannot find the next greater number for it in the second array, so output -1.
# For number 1 in the first array, the next greater number for it in the second array is 3.
# For number 2 in the first array, there is no next greater number for it in the second array, so output -1.
def nextGreaterElement(findNums, nums):
"""
:type findNums: List[int]
:type nums: List[int]
:rtype: List[int]
"""
greaterNumbers = []
for i in range(len(findNums)):
index = nums.index(findNums[i])
for x in range(index, len(nums)):
if nums[x] > findNums[i]:
greaterNumbers.append(nums[x])
break
elif x == (len(nums) - 1):
greaterNumbers.append(-1)
return greaterNumbers
findNums = [4,1,2]
nums = [1,2,3,4]
print(nextGreaterElement(findNums, nums))
|
Solve Next Greater Element I# You are given two arrays (without duplicates) nums1 and nums2 where nums1’s elements are subset of nums2. Find all the next greater numbers for nums1's elements in the corresponding places of nums2.
# The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist, output -1 for this number.
# Exampe:
# Input: nums1 = [4,1,2], nums2 = [1,3,4,2].
# Output: [-1,3,-1]
# Explanation:
# For number 4 in the first array, you cannot find the next greater number for it in the second array, so output -1.
# For number 1 in the first array, the next greater number for it in the second array is 3.
# For number 2 in the first array, there is no next greater number for it in the second array, so output -1.
def nextGreaterElement(findNums, nums):
"""
:type findNums: List[int]
:type nums: List[int]
:rtype: List[int]
"""
greaterNumbers = []
for i in range(len(findNums)):
index = nums.index(findNums[i])
for x in range(index, len(nums)):
if nums[x] > findNums[i]:
greaterNumbers.append(nums[x])
break
elif x == (len(nums) - 1):
greaterNumbers.append(-1)
return greaterNumbers
findNums = [4,1,2]
nums = [1,2,3,4]
print(nextGreaterElement(findNums, nums))
|
<commit_before><commit_msg>Solve Next Greater Element I<commit_after># You are given two arrays (without duplicates) nums1 and nums2 where nums1’s elements are subset of nums2. Find all the next greater numbers for nums1's elements in the corresponding places of nums2.
# The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist, output -1 for this number.
# Exampe:
# Input: nums1 = [4,1,2], nums2 = [1,3,4,2].
# Output: [-1,3,-1]
# Explanation:
# For number 4 in the first array, you cannot find the next greater number for it in the second array, so output -1.
# For number 1 in the first array, the next greater number for it in the second array is 3.
# For number 2 in the first array, there is no next greater number for it in the second array, so output -1.
def nextGreaterElement(findNums, nums):
"""
:type findNums: List[int]
:type nums: List[int]
:rtype: List[int]
"""
greaterNumbers = []
for i in range(len(findNums)):
index = nums.index(findNums[i])
for x in range(index, len(nums)):
if nums[x] > findNums[i]:
greaterNumbers.append(nums[x])
break
elif x == (len(nums) - 1):
greaterNumbers.append(-1)
return greaterNumbers
findNums = [4,1,2]
nums = [1,2,3,4]
print(nextGreaterElement(findNums, nums))
|
|
8a08a5c40e890a63ee1e6b1d30ad16155245df70
|
mltsp/ext/celeryconfig.py
|
mltsp/ext/celeryconfig.py
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
Use RethinkDB as Celery backend
|
Use RethinkDB as Celery backend
|
Python
|
bsd-3-clause
|
mltsp/mltsp,bnaul/mltsp,bnaul/mltsp,bnaul/mltsp,mltsp/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,acrellin/mltsp,acrellin/mltsp,acrellin/mltsp,acrellin/mltsp,mltsp/mltsp,acrellin/mltsp,bnaul/mltsp,bnaul/mltsp,mltsp/mltsp,bnaul/mltsp
|
Use RethinkDB as Celery backend
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
<commit_before><commit_msg>Use RethinkDB as Celery backend<commit_after>
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
Use RethinkDB as Celery backend#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
<commit_before><commit_msg>Use RethinkDB as Celery backend<commit_after>#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
|
642734303e2a95d156cd01f7ca17f07ee9bc97e6
|
maud/tests_support.py
|
maud/tests_support.py
|
import numpy as np
from numpy import ma
from numpy.random import random
def inputsizes_f2D(f):
l = 3
# 1D input
#x = np.arange(10)
#y = x
#z = random(x.shape)
#h = wmean_2D(x, y, z, l)
# 2D input
x = np.arange(10)
y = np.arange(3)
X, Y = np.meshgrid(x, y)
Z = random(X.shape)
h = f(X, Y, Z, l)
assert Z.shape == h.shape
# 3D input
Z = random([3]+list(X.shape))
h = f(X, Y, Z, l)
assert Z.shape == h.shape
def masked_input_2D(f, N=4):
l = 2*N/3
x = np.arange(N)
y = np.arange(N)
X, Y = np.meshgrid(x, y)
# input ndarray -> output ndarray
Z = np.ones(X.shape)
h = f(X, Y, Z, l=l)
assert type(h) is np.ndarray
# input MA array -> output MA array
Z = ma.array(Z)
h = f(X, Y, Z, l=l)
assert type(h) == ma.MaskedArray
# Input MA and mask==False -> Output MA and mask==False
assert ~h.mask.any()
# Only the masked inputs should return as masked.
Z.mask = ma.getmaskarray(Z)
Z.mask[0, 0] = True
h = f(X, Y, Z, l=l)
assert h[0, 0].mask == True
assert ~h[1:, 1:].mask.any()
def eval_ones(f, x, y, z, l):
h = f(x, y, z, l=l)
assert (h == 1).all()
# Ones masked array with random masked positions
tmp = random(z.shape)
# Select the top 1 third of the positions
thr = np.percentile(tmp, 70)
z = ma.masked_array(z, tmp>=thr)
h = f(x, y, z, l=l)
assert (h == 1).all()
# Masked values should not interfere in the filtered output.
z.data[z.mask==True] = 1e10
h = f(x, y, z, l=l)
assert (h == 1).all()
# With interp, the energy should also be preserved
h = f(x, y, z, l=l, interp=True)
assert (h == 1).all()
|
Set of functions used in the tests.
|
Set of functions used in the tests.
|
Python
|
bsd-3-clause
|
castelao/maud
|
Set of functions used in the tests.
|
import numpy as np
from numpy import ma
from numpy.random import random
def inputsizes_f2D(f):
l = 3
# 1D input
#x = np.arange(10)
#y = x
#z = random(x.shape)
#h = wmean_2D(x, y, z, l)
# 2D input
x = np.arange(10)
y = np.arange(3)
X, Y = np.meshgrid(x, y)
Z = random(X.shape)
h = f(X, Y, Z, l)
assert Z.shape == h.shape
# 3D input
Z = random([3]+list(X.shape))
h = f(X, Y, Z, l)
assert Z.shape == h.shape
def masked_input_2D(f, N=4):
l = 2*N/3
x = np.arange(N)
y = np.arange(N)
X, Y = np.meshgrid(x, y)
# input ndarray -> output ndarray
Z = np.ones(X.shape)
h = f(X, Y, Z, l=l)
assert type(h) is np.ndarray
# input MA array -> output MA array
Z = ma.array(Z)
h = f(X, Y, Z, l=l)
assert type(h) == ma.MaskedArray
# Input MA and mask==False -> Output MA and mask==False
assert ~h.mask.any()
# Only the masked inputs should return as masked.
Z.mask = ma.getmaskarray(Z)
Z.mask[0, 0] = True
h = f(X, Y, Z, l=l)
assert h[0, 0].mask == True
assert ~h[1:, 1:].mask.any()
def eval_ones(f, x, y, z, l):
h = f(x, y, z, l=l)
assert (h == 1).all()
# Ones masked array with random masked positions
tmp = random(z.shape)
# Select the top 1 third of the positions
thr = np.percentile(tmp, 70)
z = ma.masked_array(z, tmp>=thr)
h = f(x, y, z, l=l)
assert (h == 1).all()
# Masked values should not interfere in the filtered output.
z.data[z.mask==True] = 1e10
h = f(x, y, z, l=l)
assert (h == 1).all()
# With interp, the energy should also be preserved
h = f(x, y, z, l=l, interp=True)
assert (h == 1).all()
|
<commit_before><commit_msg>Set of functions used in the tests.<commit_after>
|
import numpy as np
from numpy import ma
from numpy.random import random
def inputsizes_f2D(f):
l = 3
# 1D input
#x = np.arange(10)
#y = x
#z = random(x.shape)
#h = wmean_2D(x, y, z, l)
# 2D input
x = np.arange(10)
y = np.arange(3)
X, Y = np.meshgrid(x, y)
Z = random(X.shape)
h = f(X, Y, Z, l)
assert Z.shape == h.shape
# 3D input
Z = random([3]+list(X.shape))
h = f(X, Y, Z, l)
assert Z.shape == h.shape
def masked_input_2D(f, N=4):
l = 2*N/3
x = np.arange(N)
y = np.arange(N)
X, Y = np.meshgrid(x, y)
# input ndarray -> output ndarray
Z = np.ones(X.shape)
h = f(X, Y, Z, l=l)
assert type(h) is np.ndarray
# input MA array -> output MA array
Z = ma.array(Z)
h = f(X, Y, Z, l=l)
assert type(h) == ma.MaskedArray
# Input MA and mask==False -> Output MA and mask==False
assert ~h.mask.any()
# Only the masked inputs should return as masked.
Z.mask = ma.getmaskarray(Z)
Z.mask[0, 0] = True
h = f(X, Y, Z, l=l)
assert h[0, 0].mask == True
assert ~h[1:, 1:].mask.any()
def eval_ones(f, x, y, z, l):
h = f(x, y, z, l=l)
assert (h == 1).all()
# Ones masked array with random masked positions
tmp = random(z.shape)
# Select the top 1 third of the positions
thr = np.percentile(tmp, 70)
z = ma.masked_array(z, tmp>=thr)
h = f(x, y, z, l=l)
assert (h == 1).all()
# Masked values should not interfere in the filtered output.
z.data[z.mask==True] = 1e10
h = f(x, y, z, l=l)
assert (h == 1).all()
# With interp, the energy should also be preserved
h = f(x, y, z, l=l, interp=True)
assert (h == 1).all()
|
Set of functions used in the tests.import numpy as np
from numpy import ma
from numpy.random import random
def inputsizes_f2D(f):
l = 3
# 1D input
#x = np.arange(10)
#y = x
#z = random(x.shape)
#h = wmean_2D(x, y, z, l)
# 2D input
x = np.arange(10)
y = np.arange(3)
X, Y = np.meshgrid(x, y)
Z = random(X.shape)
h = f(X, Y, Z, l)
assert Z.shape == h.shape
# 3D input
Z = random([3]+list(X.shape))
h = f(X, Y, Z, l)
assert Z.shape == h.shape
def masked_input_2D(f, N=4):
l = 2*N/3
x = np.arange(N)
y = np.arange(N)
X, Y = np.meshgrid(x, y)
# input ndarray -> output ndarray
Z = np.ones(X.shape)
h = f(X, Y, Z, l=l)
assert type(h) is np.ndarray
# input MA array -> output MA array
Z = ma.array(Z)
h = f(X, Y, Z, l=l)
assert type(h) == ma.MaskedArray
# Input MA and mask==False -> Output MA and mask==False
assert ~h.mask.any()
# Only the masked inputs should return as masked.
Z.mask = ma.getmaskarray(Z)
Z.mask[0, 0] = True
h = f(X, Y, Z, l=l)
assert h[0, 0].mask == True
assert ~h[1:, 1:].mask.any()
def eval_ones(f, x, y, z, l):
h = f(x, y, z, l=l)
assert (h == 1).all()
# Ones masked array with random masked positions
tmp = random(z.shape)
# Select the top 1 third of the positions
thr = np.percentile(tmp, 70)
z = ma.masked_array(z, tmp>=thr)
h = f(x, y, z, l=l)
assert (h == 1).all()
# Masked values should not interfere in the filtered output.
z.data[z.mask==True] = 1e10
h = f(x, y, z, l=l)
assert (h == 1).all()
# With interp, the energy should also be preserved
h = f(x, y, z, l=l, interp=True)
assert (h == 1).all()
|
<commit_before><commit_msg>Set of functions used in the tests.<commit_after>import numpy as np
from numpy import ma
from numpy.random import random
def inputsizes_f2D(f):
l = 3
# 1D input
#x = np.arange(10)
#y = x
#z = random(x.shape)
#h = wmean_2D(x, y, z, l)
# 2D input
x = np.arange(10)
y = np.arange(3)
X, Y = np.meshgrid(x, y)
Z = random(X.shape)
h = f(X, Y, Z, l)
assert Z.shape == h.shape
# 3D input
Z = random([3]+list(X.shape))
h = f(X, Y, Z, l)
assert Z.shape == h.shape
def masked_input_2D(f, N=4):
l = 2*N/3
x = np.arange(N)
y = np.arange(N)
X, Y = np.meshgrid(x, y)
# input ndarray -> output ndarray
Z = np.ones(X.shape)
h = f(X, Y, Z, l=l)
assert type(h) is np.ndarray
# input MA array -> output MA array
Z = ma.array(Z)
h = f(X, Y, Z, l=l)
assert type(h) == ma.MaskedArray
# Input MA and mask==False -> Output MA and mask==False
assert ~h.mask.any()
# Only the masked inputs should return as masked.
Z.mask = ma.getmaskarray(Z)
Z.mask[0, 0] = True
h = f(X, Y, Z, l=l)
assert h[0, 0].mask == True
assert ~h[1:, 1:].mask.any()
def eval_ones(f, x, y, z, l):
h = f(x, y, z, l=l)
assert (h == 1).all()
# Ones masked array with random masked positions
tmp = random(z.shape)
# Select the top 1 third of the positions
thr = np.percentile(tmp, 70)
z = ma.masked_array(z, tmp>=thr)
h = f(x, y, z, l=l)
assert (h == 1).all()
# Masked values should not interfere in the filtered output.
z.data[z.mask==True] = 1e10
h = f(x, y, z, l=l)
assert (h == 1).all()
# With interp, the energy should also be preserved
h = f(x, y, z, l=l, interp=True)
assert (h == 1).all()
|
|
65838da6a7353ae084a13565ea2dd55368a27176
|
history/migrations/0007_auto_20141026_2348.py
|
history/migrations/0007_auto_20141026_2348.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('history', '0006_committeemember_member'),
]
operations = [
migrations.AlterField(
model_name='meetingminutes',
name='meeting_type',
field=models.CharField(default=b'MM', max_length=2, choices=[(b'NI', b'New Initiatives'), (b'MM', b'Main Meetings'), (b'OF', b'Officer Meetings'), (b'AD', b'Advisory Board Meetings'), (b'CM', b'Committee Meeting Minutes')]),
),
]
|
Update meeting minutes model (committees)
|
Update meeting minutes model (committees)
|
Python
|
apache-2.0
|
tbpmig/mig-website,tbpmig/mig-website,tbpmig/mig-website
|
Update meeting minutes model (committees)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('history', '0006_committeemember_member'),
]
operations = [
migrations.AlterField(
model_name='meetingminutes',
name='meeting_type',
field=models.CharField(default=b'MM', max_length=2, choices=[(b'NI', b'New Initiatives'), (b'MM', b'Main Meetings'), (b'OF', b'Officer Meetings'), (b'AD', b'Advisory Board Meetings'), (b'CM', b'Committee Meeting Minutes')]),
),
]
|
<commit_before><commit_msg>Update meeting minutes model (committees)<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('history', '0006_committeemember_member'),
]
operations = [
migrations.AlterField(
model_name='meetingminutes',
name='meeting_type',
field=models.CharField(default=b'MM', max_length=2, choices=[(b'NI', b'New Initiatives'), (b'MM', b'Main Meetings'), (b'OF', b'Officer Meetings'), (b'AD', b'Advisory Board Meetings'), (b'CM', b'Committee Meeting Minutes')]),
),
]
|
Update meeting minutes model (committees)# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('history', '0006_committeemember_member'),
]
operations = [
migrations.AlterField(
model_name='meetingminutes',
name='meeting_type',
field=models.CharField(default=b'MM', max_length=2, choices=[(b'NI', b'New Initiatives'), (b'MM', b'Main Meetings'), (b'OF', b'Officer Meetings'), (b'AD', b'Advisory Board Meetings'), (b'CM', b'Committee Meeting Minutes')]),
),
]
|
<commit_before><commit_msg>Update meeting minutes model (committees)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('history', '0006_committeemember_member'),
]
operations = [
migrations.AlterField(
model_name='meetingminutes',
name='meeting_type',
field=models.CharField(default=b'MM', max_length=2, choices=[(b'NI', b'New Initiatives'), (b'MM', b'Main Meetings'), (b'OF', b'Officer Meetings'), (b'AD', b'Advisory Board Meetings'), (b'CM', b'Committee Meeting Minutes')]),
),
]
|
|
a778bc11b10aaa8cab1907ea1be43a9334d78ce2
|
modules/mod_chatterbot.py
|
modules/mod_chatterbot.py
|
from modules.module_base import ModuleBase
from bot import Bot
from chatterbot import ChatBot
class ModuleChatterBot(ModuleBase):
def __init__(self, bot):
ModuleBase.__init__(self, bot)
self.name = "ChatterBot"
self.chatbot = ChatBot("Ron Obvious", database='modules/resources/chatterbot.db')
|
Add new module for ChatterBot
|
Add new module for ChatterBot
|
Python
|
mit
|
mamaddeveloper/teleadmin,mamaddeveloper/teleadmin,mamaddeveloper/telegrambot,mamaddeveloper/telegrambot
|
Add new module for ChatterBot
|
from modules.module_base import ModuleBase
from bot import Bot
from chatterbot import ChatBot
class ModuleChatterBot(ModuleBase):
def __init__(self, bot):
ModuleBase.__init__(self, bot)
self.name = "ChatterBot"
self.chatbot = ChatBot("Ron Obvious", database='modules/resources/chatterbot.db')
|
<commit_before><commit_msg>Add new module for ChatterBot<commit_after>
|
from modules.module_base import ModuleBase
from bot import Bot
from chatterbot import ChatBot
class ModuleChatterBot(ModuleBase):
def __init__(self, bot):
ModuleBase.__init__(self, bot)
self.name = "ChatterBot"
self.chatbot = ChatBot("Ron Obvious", database='modules/resources/chatterbot.db')
|
Add new module for ChatterBotfrom modules.module_base import ModuleBase
from bot import Bot
from chatterbot import ChatBot
class ModuleChatterBot(ModuleBase):
def __init__(self, bot):
ModuleBase.__init__(self, bot)
self.name = "ChatterBot"
self.chatbot = ChatBot("Ron Obvious", database='modules/resources/chatterbot.db')
|
<commit_before><commit_msg>Add new module for ChatterBot<commit_after>from modules.module_base import ModuleBase
from bot import Bot
from chatterbot import ChatBot
class ModuleChatterBot(ModuleBase):
def __init__(self, bot):
ModuleBase.__init__(self, bot)
self.name = "ChatterBot"
self.chatbot = ChatBot("Ron Obvious", database='modules/resources/chatterbot.db')
|
|
f584fc98f7cdfe97caeb31d9abc742eafa6cac03
|
software/tests/test_buildtermpages.py
|
software/tests/test_buildtermpages.py
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import sys
import unittest
for path in (os.getcwd(), "software/util", "software/SchemaTerms"):
sys.path.insert(1, path) #Pickup libs from local directories
import buildtermpages
import schemaexamples
import sdoterm
class TestTermFileName(unittest.TestCase):
"""Test the utility function that creates paths for terms."""
def testEmpty(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("")
def testUnicode(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("🟪")
def testUpper(self):
self.assertEqual(
buildtermpages.termFileName("Thingamabob"),
"software/site/terms/types/T/Thingamabob.html")
def testLower(self):
self.assertEqual(
buildtermpages.termFileName("thingamabob"),
"software/site/terms/properties/t/thingamabob.html"
)
def testDigit(self):
self.assertEqual(
buildtermpages.termFileName("4DStatue"),
"software/site/terms/types/4/4DStatue.html"
)
class TestBuildTermPages(unittest.TestCase):
def testTemplateRender(self):
"""Test rendering of one term page."""
examples = [
schemaexamples.Example(
terms=["Thingamabob"], original_html="<b>Thingamabob</b>", microdata="", rdfa="", jsonld="",
exmeta={"file": __file__, "filepos": 0 })
]
json = "[42]"
term = sdoterm.SdoTerm(termType=sdoterm.SdoTerm.TYPE, Id=42, uri="http://example.com/thingamabob", label="Thingamabob")
output = buildtermpages.termtemplateRender(term=term, examples=examples, json=json)
self.assertRegex(output, ".*Thingamabob.*")
self.assertRegex(output, ".*http://example\.com/thingamabob.*")
|
Speed up generation of term pages.
|
Speed up generation of term pages.
|
Python
|
apache-2.0
|
schemaorg/schemaorg,schemaorg/schemaorg,schemaorg/schemaorg,schemaorg/schemaorg,schemaorg/schemaorg
|
Speed up generation of term pages.
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import sys
import unittest
for path in (os.getcwd(), "software/util", "software/SchemaTerms"):
sys.path.insert(1, path) #Pickup libs from local directories
import buildtermpages
import schemaexamples
import sdoterm
class TestTermFileName(unittest.TestCase):
"""Test the utility function that creates paths for terms."""
def testEmpty(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("")
def testUnicode(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("🟪")
def testUpper(self):
self.assertEqual(
buildtermpages.termFileName("Thingamabob"),
"software/site/terms/types/T/Thingamabob.html")
def testLower(self):
self.assertEqual(
buildtermpages.termFileName("thingamabob"),
"software/site/terms/properties/t/thingamabob.html"
)
def testDigit(self):
self.assertEqual(
buildtermpages.termFileName("4DStatue"),
"software/site/terms/types/4/4DStatue.html"
)
class TestBuildTermPages(unittest.TestCase):
def testTemplateRender(self):
"""Test rendering of one term page."""
examples = [
schemaexamples.Example(
terms=["Thingamabob"], original_html="<b>Thingamabob</b>", microdata="", rdfa="", jsonld="",
exmeta={"file": __file__, "filepos": 0 })
]
json = "[42]"
term = sdoterm.SdoTerm(termType=sdoterm.SdoTerm.TYPE, Id=42, uri="http://example.com/thingamabob", label="Thingamabob")
output = buildtermpages.termtemplateRender(term=term, examples=examples, json=json)
self.assertRegex(output, ".*Thingamabob.*")
self.assertRegex(output, ".*http://example\.com/thingamabob.*")
|
<commit_before><commit_msg>Speed up generation of term pages.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import sys
import unittest
for path in (os.getcwd(), "software/util", "software/SchemaTerms"):
sys.path.insert(1, path) #Pickup libs from local directories
import buildtermpages
import schemaexamples
import sdoterm
class TestTermFileName(unittest.TestCase):
"""Test the utility function that creates paths for terms."""
def testEmpty(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("")
def testUnicode(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("🟪")
def testUpper(self):
self.assertEqual(
buildtermpages.termFileName("Thingamabob"),
"software/site/terms/types/T/Thingamabob.html")
def testLower(self):
self.assertEqual(
buildtermpages.termFileName("thingamabob"),
"software/site/terms/properties/t/thingamabob.html"
)
def testDigit(self):
self.assertEqual(
buildtermpages.termFileName("4DStatue"),
"software/site/terms/types/4/4DStatue.html"
)
class TestBuildTermPages(unittest.TestCase):
def testTemplateRender(self):
"""Test rendering of one term page."""
examples = [
schemaexamples.Example(
terms=["Thingamabob"], original_html="<b>Thingamabob</b>", microdata="", rdfa="", jsonld="",
exmeta={"file": __file__, "filepos": 0 })
]
json = "[42]"
term = sdoterm.SdoTerm(termType=sdoterm.SdoTerm.TYPE, Id=42, uri="http://example.com/thingamabob", label="Thingamabob")
output = buildtermpages.termtemplateRender(term=term, examples=examples, json=json)
self.assertRegex(output, ".*Thingamabob.*")
self.assertRegex(output, ".*http://example\.com/thingamabob.*")
|
Speed up generation of term pages.#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import sys
import unittest
for path in (os.getcwd(), "software/util", "software/SchemaTerms"):
sys.path.insert(1, path) #Pickup libs from local directories
import buildtermpages
import schemaexamples
import sdoterm
class TestTermFileName(unittest.TestCase):
"""Test the utility function that creates paths for terms."""
def testEmpty(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("")
def testUnicode(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("🟪")
def testUpper(self):
self.assertEqual(
buildtermpages.termFileName("Thingamabob"),
"software/site/terms/types/T/Thingamabob.html")
def testLower(self):
self.assertEqual(
buildtermpages.termFileName("thingamabob"),
"software/site/terms/properties/t/thingamabob.html"
)
def testDigit(self):
self.assertEqual(
buildtermpages.termFileName("4DStatue"),
"software/site/terms/types/4/4DStatue.html"
)
class TestBuildTermPages(unittest.TestCase):
def testTemplateRender(self):
"""Test rendering of one term page."""
examples = [
schemaexamples.Example(
terms=["Thingamabob"], original_html="<b>Thingamabob</b>", microdata="", rdfa="", jsonld="",
exmeta={"file": __file__, "filepos": 0 })
]
json = "[42]"
term = sdoterm.SdoTerm(termType=sdoterm.SdoTerm.TYPE, Id=42, uri="http://example.com/thingamabob", label="Thingamabob")
output = buildtermpages.termtemplateRender(term=term, examples=examples, json=json)
self.assertRegex(output, ".*Thingamabob.*")
self.assertRegex(output, ".*http://example\.com/thingamabob.*")
|
<commit_before><commit_msg>Speed up generation of term pages.<commit_after>#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import os
import sys
import unittest
for path in (os.getcwd(), "software/util", "software/SchemaTerms"):
sys.path.insert(1, path) #Pickup libs from local directories
import buildtermpages
import schemaexamples
import sdoterm
class TestTermFileName(unittest.TestCase):
"""Test the utility function that creates paths for terms."""
def testEmpty(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("")
def testUnicode(self):
with self.assertRaises(ValueError):
buildtermpages.termFileName("🟪")
def testUpper(self):
self.assertEqual(
buildtermpages.termFileName("Thingamabob"),
"software/site/terms/types/T/Thingamabob.html")
def testLower(self):
self.assertEqual(
buildtermpages.termFileName("thingamabob"),
"software/site/terms/properties/t/thingamabob.html"
)
def testDigit(self):
self.assertEqual(
buildtermpages.termFileName("4DStatue"),
"software/site/terms/types/4/4DStatue.html"
)
class TestBuildTermPages(unittest.TestCase):
def testTemplateRender(self):
"""Test rendering of one term page."""
examples = [
schemaexamples.Example(
terms=["Thingamabob"], original_html="<b>Thingamabob</b>", microdata="", rdfa="", jsonld="",
exmeta={"file": __file__, "filepos": 0 })
]
json = "[42]"
term = sdoterm.SdoTerm(termType=sdoterm.SdoTerm.TYPE, Id=42, uri="http://example.com/thingamabob", label="Thingamabob")
output = buildtermpages.termtemplateRender(term=term, examples=examples, json=json)
self.assertRegex(output, ".*Thingamabob.*")
self.assertRegex(output, ".*http://example\.com/thingamabob.*")
|
|
bf4b048280151e1fd83600fa4aa592da187ca229
|
sahara/tests/unit/plugins/ambari/test_common.py
|
sahara/tests/unit/plugins/ambari/test_common.py
|
# Copyright (c) 2017 EasyStack Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.ambari import common
from sahara.tests.unit import base
class AmbariCommonTestCase(base.SaharaTestCase):
def setUp(self):
super(AmbariCommonTestCase, self).setUp()
self.master_ng = mock.Mock()
self.master_ng.node_processes = ['Ambari', 'HiveServer']
self.worker_ng = mock.Mock()
self.worker_ng.node_processes = ['DataNode', 'Oozie']
self.cluster = mock.Mock()
self.cluster.node_groups = [self.master_ng, self.worker_ng]
def test_get_ambari_proc_list(self):
procs = common.get_ambari_proc_list(self.master_ng)
expected = ['METRICS_COLLECTOR', 'HIVE_SERVER',
'MYSQL_SERVER', 'WEBHCAT_SERVER']
self.assertEqual(procs, expected)
procs = common.get_ambari_proc_list(self.worker_ng)
expected = ['DATANODE', 'OOZIE_SERVER', 'PIG']
self.assertEqual(procs, expected)
@mock.patch('sahara.plugins.kerberos.is_kerberos_security_enabled')
def test_get_clients(self, kerberos):
kerberos.return_value = False
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR']
for e in expected:
self.assertIn(e, clients)
kerberos.return_value = True
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR', 'KERBEROS_CLIENT']
for e in expected:
self.assertIn(e, clients)
def test_instances_have_process(self):
instance1 = mock.Mock()
instance2 = mock.Mock()
instance1.node_group = self.master_ng
instance2.node_group = self.worker_ng
self.assertTrue(common.instances_have_process([instance1], "Ambari"))
self.assertTrue(common.instances_have_process([instance1, instance2],
"DataNode"))
self.assertFalse(common.instances_have_process([instance1, instance2],
"DRPC Server"))
|
Add missing tests to plugin ambari
|
Add missing tests to plugin ambari
The tests for plugins ambari is not completed.
Add:
test_get_ambari_proc_list()
test_get_clients()
test_instances_have_process()
Change-Id: Id835131c9715b24adf523a252849143ede711fdf
|
Python
|
apache-2.0
|
openstack/sahara,openstack/sahara
|
Add missing tests to plugin ambari
The tests for plugins ambari is not completed.
Add:
test_get_ambari_proc_list()
test_get_clients()
test_instances_have_process()
Change-Id: Id835131c9715b24adf523a252849143ede711fdf
|
# Copyright (c) 2017 EasyStack Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.ambari import common
from sahara.tests.unit import base
class AmbariCommonTestCase(base.SaharaTestCase):
def setUp(self):
super(AmbariCommonTestCase, self).setUp()
self.master_ng = mock.Mock()
self.master_ng.node_processes = ['Ambari', 'HiveServer']
self.worker_ng = mock.Mock()
self.worker_ng.node_processes = ['DataNode', 'Oozie']
self.cluster = mock.Mock()
self.cluster.node_groups = [self.master_ng, self.worker_ng]
def test_get_ambari_proc_list(self):
procs = common.get_ambari_proc_list(self.master_ng)
expected = ['METRICS_COLLECTOR', 'HIVE_SERVER',
'MYSQL_SERVER', 'WEBHCAT_SERVER']
self.assertEqual(procs, expected)
procs = common.get_ambari_proc_list(self.worker_ng)
expected = ['DATANODE', 'OOZIE_SERVER', 'PIG']
self.assertEqual(procs, expected)
@mock.patch('sahara.plugins.kerberos.is_kerberos_security_enabled')
def test_get_clients(self, kerberos):
kerberos.return_value = False
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR']
for e in expected:
self.assertIn(e, clients)
kerberos.return_value = True
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR', 'KERBEROS_CLIENT']
for e in expected:
self.assertIn(e, clients)
def test_instances_have_process(self):
instance1 = mock.Mock()
instance2 = mock.Mock()
instance1.node_group = self.master_ng
instance2.node_group = self.worker_ng
self.assertTrue(common.instances_have_process([instance1], "Ambari"))
self.assertTrue(common.instances_have_process([instance1, instance2],
"DataNode"))
self.assertFalse(common.instances_have_process([instance1, instance2],
"DRPC Server"))
|
<commit_before><commit_msg>Add missing tests to plugin ambari
The tests for plugins ambari is not completed.
Add:
test_get_ambari_proc_list()
test_get_clients()
test_instances_have_process()
Change-Id: Id835131c9715b24adf523a252849143ede711fdf<commit_after>
|
# Copyright (c) 2017 EasyStack Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.ambari import common
from sahara.tests.unit import base
class AmbariCommonTestCase(base.SaharaTestCase):
def setUp(self):
super(AmbariCommonTestCase, self).setUp()
self.master_ng = mock.Mock()
self.master_ng.node_processes = ['Ambari', 'HiveServer']
self.worker_ng = mock.Mock()
self.worker_ng.node_processes = ['DataNode', 'Oozie']
self.cluster = mock.Mock()
self.cluster.node_groups = [self.master_ng, self.worker_ng]
def test_get_ambari_proc_list(self):
procs = common.get_ambari_proc_list(self.master_ng)
expected = ['METRICS_COLLECTOR', 'HIVE_SERVER',
'MYSQL_SERVER', 'WEBHCAT_SERVER']
self.assertEqual(procs, expected)
procs = common.get_ambari_proc_list(self.worker_ng)
expected = ['DATANODE', 'OOZIE_SERVER', 'PIG']
self.assertEqual(procs, expected)
@mock.patch('sahara.plugins.kerberos.is_kerberos_security_enabled')
def test_get_clients(self, kerberos):
kerberos.return_value = False
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR']
for e in expected:
self.assertIn(e, clients)
kerberos.return_value = True
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR', 'KERBEROS_CLIENT']
for e in expected:
self.assertIn(e, clients)
def test_instances_have_process(self):
instance1 = mock.Mock()
instance2 = mock.Mock()
instance1.node_group = self.master_ng
instance2.node_group = self.worker_ng
self.assertTrue(common.instances_have_process([instance1], "Ambari"))
self.assertTrue(common.instances_have_process([instance1, instance2],
"DataNode"))
self.assertFalse(common.instances_have_process([instance1, instance2],
"DRPC Server"))
|
Add missing tests to plugin ambari
The tests for plugins ambari is not completed.
Add:
test_get_ambari_proc_list()
test_get_clients()
test_instances_have_process()
Change-Id: Id835131c9715b24adf523a252849143ede711fdf# Copyright (c) 2017 EasyStack Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.ambari import common
from sahara.tests.unit import base
class AmbariCommonTestCase(base.SaharaTestCase):
def setUp(self):
super(AmbariCommonTestCase, self).setUp()
self.master_ng = mock.Mock()
self.master_ng.node_processes = ['Ambari', 'HiveServer']
self.worker_ng = mock.Mock()
self.worker_ng.node_processes = ['DataNode', 'Oozie']
self.cluster = mock.Mock()
self.cluster.node_groups = [self.master_ng, self.worker_ng]
def test_get_ambari_proc_list(self):
procs = common.get_ambari_proc_list(self.master_ng)
expected = ['METRICS_COLLECTOR', 'HIVE_SERVER',
'MYSQL_SERVER', 'WEBHCAT_SERVER']
self.assertEqual(procs, expected)
procs = common.get_ambari_proc_list(self.worker_ng)
expected = ['DATANODE', 'OOZIE_SERVER', 'PIG']
self.assertEqual(procs, expected)
@mock.patch('sahara.plugins.kerberos.is_kerberos_security_enabled')
def test_get_clients(self, kerberos):
kerberos.return_value = False
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR']
for e in expected:
self.assertIn(e, clients)
kerberos.return_value = True
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR', 'KERBEROS_CLIENT']
for e in expected:
self.assertIn(e, clients)
def test_instances_have_process(self):
instance1 = mock.Mock()
instance2 = mock.Mock()
instance1.node_group = self.master_ng
instance2.node_group = self.worker_ng
self.assertTrue(common.instances_have_process([instance1], "Ambari"))
self.assertTrue(common.instances_have_process([instance1, instance2],
"DataNode"))
self.assertFalse(common.instances_have_process([instance1, instance2],
"DRPC Server"))
|
<commit_before><commit_msg>Add missing tests to plugin ambari
The tests for plugins ambari is not completed.
Add:
test_get_ambari_proc_list()
test_get_clients()
test_instances_have_process()
Change-Id: Id835131c9715b24adf523a252849143ede711fdf<commit_after># Copyright (c) 2017 EasyStack Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins.ambari import common
from sahara.tests.unit import base
class AmbariCommonTestCase(base.SaharaTestCase):
def setUp(self):
super(AmbariCommonTestCase, self).setUp()
self.master_ng = mock.Mock()
self.master_ng.node_processes = ['Ambari', 'HiveServer']
self.worker_ng = mock.Mock()
self.worker_ng.node_processes = ['DataNode', 'Oozie']
self.cluster = mock.Mock()
self.cluster.node_groups = [self.master_ng, self.worker_ng]
def test_get_ambari_proc_list(self):
procs = common.get_ambari_proc_list(self.master_ng)
expected = ['METRICS_COLLECTOR', 'HIVE_SERVER',
'MYSQL_SERVER', 'WEBHCAT_SERVER']
self.assertEqual(procs, expected)
procs = common.get_ambari_proc_list(self.worker_ng)
expected = ['DATANODE', 'OOZIE_SERVER', 'PIG']
self.assertEqual(procs, expected)
@mock.patch('sahara.plugins.kerberos.is_kerberos_security_enabled')
def test_get_clients(self, kerberos):
kerberos.return_value = False
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR']
for e in expected:
self.assertIn(e, clients)
kerberos.return_value = True
clients = common.get_clients(self.cluster)
expected = ['OOZIE_CLIENT', 'HIVE_CLIENT', 'HDFS_CLIENT',
'TEZ_CLIENT', 'METRICS_MONITOR', 'KERBEROS_CLIENT']
for e in expected:
self.assertIn(e, clients)
def test_instances_have_process(self):
instance1 = mock.Mock()
instance2 = mock.Mock()
instance1.node_group = self.master_ng
instance2.node_group = self.worker_ng
self.assertTrue(common.instances_have_process([instance1], "Ambari"))
self.assertTrue(common.instances_have_process([instance1, instance2],
"DataNode"))
self.assertFalse(common.instances_have_process([instance1, instance2],
"DRPC Server"))
|
|
990f792e67ae9d7c6dd194f08a3812e29fa594ac
|
amivapi/tests/test_hidden_passwords.py
|
amivapi/tests/test_hidden_passwords.py
|
from amivapi.tests import util
class TestHiddenPasswords(util.WebTestNoAuth):
def test_passwords_hidden(self):
user = self.new_user()
response = self.api.get("/users/%i" % user.id,
query_string='projection:{"password":1}',
status_code=200)
self.assertTrue('password' not in response.json)
|
Test if passwords are visible through API
|
Tests: Test if passwords are visible through API
|
Python
|
agpl-3.0
|
amiv-eth/amivapi,amiv-eth/amivapi,amiv-eth/amivapi
|
Tests: Test if passwords are visible through API
|
from amivapi.tests import util
class TestHiddenPasswords(util.WebTestNoAuth):
def test_passwords_hidden(self):
user = self.new_user()
response = self.api.get("/users/%i" % user.id,
query_string='projection:{"password":1}',
status_code=200)
self.assertTrue('password' not in response.json)
|
<commit_before><commit_msg>Tests: Test if passwords are visible through API<commit_after>
|
from amivapi.tests import util
class TestHiddenPasswords(util.WebTestNoAuth):
def test_passwords_hidden(self):
user = self.new_user()
response = self.api.get("/users/%i" % user.id,
query_string='projection:{"password":1}',
status_code=200)
self.assertTrue('password' not in response.json)
|
Tests: Test if passwords are visible through APIfrom amivapi.tests import util
class TestHiddenPasswords(util.WebTestNoAuth):
def test_passwords_hidden(self):
user = self.new_user()
response = self.api.get("/users/%i" % user.id,
query_string='projection:{"password":1}',
status_code=200)
self.assertTrue('password' not in response.json)
|
<commit_before><commit_msg>Tests: Test if passwords are visible through API<commit_after>from amivapi.tests import util
class TestHiddenPasswords(util.WebTestNoAuth):
def test_passwords_hidden(self):
user = self.new_user()
response = self.api.get("/users/%i" % user.id,
query_string='projection:{"password":1}',
status_code=200)
self.assertTrue('password' not in response.json)
|
|
f2e91b0c47560a047789e31c618d3c60de1decb6
|
build/android/pylib/thermal_throttle.py
|
build/android/pylib/thermal_throttle.py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
class ThermalThrottle(object):
"""Class to detect and track thermal throttling
Usage:
Wait for IsThrottled() to be False before running test
After running test call HasBeenThrottled() to find out if the
test run was affected by thermal throttling.
Currently assumes an OMap device.
"""
def __init__(self, adb):
self._adb = adb
self._throttled = False
def HasBeenThrottled(self):
""" True if there has been any throttling since the last call to
HasBeenThrottled or IsThrottled
"""
return self._ReadLog()
def IsThrottled(self):
"""True if currently throttled"""
self._ReadLog()
return self._throttled
def _ReadLog(self):
has_been_throttled = False
log = self._adb.RunShellCommand('dmesg -c')
for line in log:
if 'omap_thermal_throttle' in line:
if not self._throttled:
logging.warning('>>> Thermally Throttled')
self._throttled = True
has_been_throttled = True
if 'omap_thermal_unthrottle' in line:
if self._throttled:
logging.warning('>>> Thermally Unthrottled')
self._throttled = False
has_been_throttled = True
return has_been_throttled
|
Add a class for management of thermal throttling
|
Add a class for management of thermal throttling
The new class allows detection of whether the device is now, or has been, thermally throttled, hence allowing rejection of performance results affected by thermal throttling.
BUG=158575
Review URL: https://chromiumcodereview.appspot.com/11352004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@165367 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,dednal/chromium.src,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,Jonekee/chromium.src,jaruba/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,anirudhSK/chromium,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,littlstar/chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,ltilve/chromium,patrickm/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,patrickm/chromium.src,ChromiumWebApps/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,anirudhSK/chromium,Just-D/chromium-1,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,nacl-webkit/chrome_deps,hujiajie/pa-chromium,timopulkkinen/BubbleFish,Jonekee/chromium.src,zcbenz/cefode-chromium,littlstar/chromium.src,markYoungH/chromium.src,nacl-webkit/chrome_deps,markYoungH/chromium.src,timopulkkinen/BubbleFish,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,Just-D/chromium-1,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,markYoungH/chromium.src,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,anirudhSK/chromium,ltilve/chromium,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,mogoweb/chromium-crosswalk,ltilve/chromium,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,dednal/chromium.src,anirudhSK/chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,dednal/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,patrickm/chromium.src,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,hujiajie/pa-chromium,littlstar/chromium.src,ChromiumWebApps/chromium,dushu1203/chromium.src,M4sse/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,dednal/chromium.src,Just-D/chromium-1,Chilledheart/chromium,M4sse/chromium.src,hujiajie/pa-chromium,timopulkkinen/BubbleFish,Chilledheart/chromium,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,anirudhSK/chromium,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,anirudhSK/chromium,dednal/chromium.src,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,patrickm/chromium.src,hujiajie/pa-chromium,anirudhSK/chromium,bright-sparks/chromium-spacewalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,markYoungH/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,zcbenz/cefode-chromium,zcbenz/cefode-chromium,nacl-webkit/chrome_deps,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,patrickm/chromium.src,ondra-novak/chromium.src,timopulkkinen/BubbleFish,jaruba/chromium.src,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,Jonekee/chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,jaruba/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,Just-D/chromium-1,ChromiumWebApps/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,dushu1203/chromium.src,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Chilledheart/chromium,zcbenz/cefode-chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,dushu1203/chromium.src,littlstar/chromium.src,Jonekee/chromium.src
|
Add a class for management of thermal throttling
The new class allows detection of whether the device is now, or has been, thermally throttled, hence allowing rejection of performance results affected by thermal throttling.
BUG=158575
Review URL: https://chromiumcodereview.appspot.com/11352004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@165367 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
class ThermalThrottle(object):
"""Class to detect and track thermal throttling
Usage:
Wait for IsThrottled() to be False before running test
After running test call HasBeenThrottled() to find out if the
test run was affected by thermal throttling.
Currently assumes an OMap device.
"""
def __init__(self, adb):
self._adb = adb
self._throttled = False
def HasBeenThrottled(self):
""" True if there has been any throttling since the last call to
HasBeenThrottled or IsThrottled
"""
return self._ReadLog()
def IsThrottled(self):
"""True if currently throttled"""
self._ReadLog()
return self._throttled
def _ReadLog(self):
has_been_throttled = False
log = self._adb.RunShellCommand('dmesg -c')
for line in log:
if 'omap_thermal_throttle' in line:
if not self._throttled:
logging.warning('>>> Thermally Throttled')
self._throttled = True
has_been_throttled = True
if 'omap_thermal_unthrottle' in line:
if self._throttled:
logging.warning('>>> Thermally Unthrottled')
self._throttled = False
has_been_throttled = True
return has_been_throttled
|
<commit_before><commit_msg>Add a class for management of thermal throttling
The new class allows detection of whether the device is now, or has been, thermally throttled, hence allowing rejection of performance results affected by thermal throttling.
BUG=158575
Review URL: https://chromiumcodereview.appspot.com/11352004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@165367 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
class ThermalThrottle(object):
"""Class to detect and track thermal throttling
Usage:
Wait for IsThrottled() to be False before running test
After running test call HasBeenThrottled() to find out if the
test run was affected by thermal throttling.
Currently assumes an OMap device.
"""
def __init__(self, adb):
self._adb = adb
self._throttled = False
def HasBeenThrottled(self):
""" True if there has been any throttling since the last call to
HasBeenThrottled or IsThrottled
"""
return self._ReadLog()
def IsThrottled(self):
"""True if currently throttled"""
self._ReadLog()
return self._throttled
def _ReadLog(self):
has_been_throttled = False
log = self._adb.RunShellCommand('dmesg -c')
for line in log:
if 'omap_thermal_throttle' in line:
if not self._throttled:
logging.warning('>>> Thermally Throttled')
self._throttled = True
has_been_throttled = True
if 'omap_thermal_unthrottle' in line:
if self._throttled:
logging.warning('>>> Thermally Unthrottled')
self._throttled = False
has_been_throttled = True
return has_been_throttled
|
Add a class for management of thermal throttling
The new class allows detection of whether the device is now, or has been, thermally throttled, hence allowing rejection of performance results affected by thermal throttling.
BUG=158575
Review URL: https://chromiumcodereview.appspot.com/11352004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@165367 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
class ThermalThrottle(object):
"""Class to detect and track thermal throttling
Usage:
Wait for IsThrottled() to be False before running test
After running test call HasBeenThrottled() to find out if the
test run was affected by thermal throttling.
Currently assumes an OMap device.
"""
def __init__(self, adb):
self._adb = adb
self._throttled = False
def HasBeenThrottled(self):
""" True if there has been any throttling since the last call to
HasBeenThrottled or IsThrottled
"""
return self._ReadLog()
def IsThrottled(self):
"""True if currently throttled"""
self._ReadLog()
return self._throttled
def _ReadLog(self):
has_been_throttled = False
log = self._adb.RunShellCommand('dmesg -c')
for line in log:
if 'omap_thermal_throttle' in line:
if not self._throttled:
logging.warning('>>> Thermally Throttled')
self._throttled = True
has_been_throttled = True
if 'omap_thermal_unthrottle' in line:
if self._throttled:
logging.warning('>>> Thermally Unthrottled')
self._throttled = False
has_been_throttled = True
return has_been_throttled
|
<commit_before><commit_msg>Add a class for management of thermal throttling
The new class allows detection of whether the device is now, or has been, thermally throttled, hence allowing rejection of performance results affected by thermal throttling.
BUG=158575
Review URL: https://chromiumcodereview.appspot.com/11352004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@165367 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
class ThermalThrottle(object):
"""Class to detect and track thermal throttling
Usage:
Wait for IsThrottled() to be False before running test
After running test call HasBeenThrottled() to find out if the
test run was affected by thermal throttling.
Currently assumes an OMap device.
"""
def __init__(self, adb):
self._adb = adb
self._throttled = False
def HasBeenThrottled(self):
""" True if there has been any throttling since the last call to
HasBeenThrottled or IsThrottled
"""
return self._ReadLog()
def IsThrottled(self):
"""True if currently throttled"""
self._ReadLog()
return self._throttled
def _ReadLog(self):
has_been_throttled = False
log = self._adb.RunShellCommand('dmesg -c')
for line in log:
if 'omap_thermal_throttle' in line:
if not self._throttled:
logging.warning('>>> Thermally Throttled')
self._throttled = True
has_been_throttled = True
if 'omap_thermal_unthrottle' in line:
if self._throttled:
logging.warning('>>> Thermally Unthrottled')
self._throttled = False
has_been_throttled = True
return has_been_throttled
|
|
2789c7a4436cc7623350eb16b226d07a920107d4
|
mchimp_amorendarrerides.py
|
mchimp_amorendarrerides.py
|
#!/usr/bin/env python
#-*- coding: utf8 -*-
import psycopg2
import config
import dbutils
import codecs
import sys
from consolemsg import step, error, fail, warn
from namespace import namespace as ns
def esPersonaFisica(soci) :
return 0 if soci.nif[2] in "ABCDEFGHJNPQRSUVW" else 1
def ambPuntDeMilers(numero) :
return '{:,}'.format(numero).replace(',','.')
db = psycopg2.connect(**config.psycopg)
with db.cursor() as cursor :
cursor.execute("""\
SELECT DISTINCT gi.id, rpa.email, rp.lang
FROM res_partner AS rp, res_partner_address AS rpa, somenergia_soci AS ss, generationkwh_investment as gi
WHERE rp.id = rpa.partner_id AND
rp.id = ss.partner_id AND
ss.id = gi.member_id AND
gi.purchase_date <= '2015-09-15' AND
gi.active = True
;
""")
print u'\t'.join(unicode(x) for x in [
'id',
'email',
'lang',
])
for line in dbutils.fetchNs(cursor) :
try:
print '\t'.join(
str(x)
.replace('\t',' ')
.replace('\n',' ')
.replace('\r',' ')
for x in [
line.id,
line.email,
line.lang,
])
except Exception as e:
import traceback
error("Error processant soci {}\n{}\n{}".format(
line.nsoci,
e,
"\n".join(traceback.format_stack()),
))
error(ns(cas=line).dump())
|
Add script apologize mortizations not sent
|
Add script apologize mortizations not sent
|
Python
|
agpl-3.0
|
Som-Energia/somenergia-dataexports,Som-Energia/somenergia-dataexports
|
Add script apologize mortizations not sent
|
#!/usr/bin/env python
#-*- coding: utf8 -*-
import psycopg2
import config
import dbutils
import codecs
import sys
from consolemsg import step, error, fail, warn
from namespace import namespace as ns
def esPersonaFisica(soci) :
return 0 if soci.nif[2] in "ABCDEFGHJNPQRSUVW" else 1
def ambPuntDeMilers(numero) :
return '{:,}'.format(numero).replace(',','.')
db = psycopg2.connect(**config.psycopg)
with db.cursor() as cursor :
cursor.execute("""\
SELECT DISTINCT gi.id, rpa.email, rp.lang
FROM res_partner AS rp, res_partner_address AS rpa, somenergia_soci AS ss, generationkwh_investment as gi
WHERE rp.id = rpa.partner_id AND
rp.id = ss.partner_id AND
ss.id = gi.member_id AND
gi.purchase_date <= '2015-09-15' AND
gi.active = True
;
""")
print u'\t'.join(unicode(x) for x in [
'id',
'email',
'lang',
])
for line in dbutils.fetchNs(cursor) :
try:
print '\t'.join(
str(x)
.replace('\t',' ')
.replace('\n',' ')
.replace('\r',' ')
for x in [
line.id,
line.email,
line.lang,
])
except Exception as e:
import traceback
error("Error processant soci {}\n{}\n{}".format(
line.nsoci,
e,
"\n".join(traceback.format_stack()),
))
error(ns(cas=line).dump())
|
<commit_before><commit_msg>Add script apologize mortizations not sent<commit_after>
|
#!/usr/bin/env python
#-*- coding: utf8 -*-
import psycopg2
import config
import dbutils
import codecs
import sys
from consolemsg import step, error, fail, warn
from namespace import namespace as ns
def esPersonaFisica(soci) :
return 0 if soci.nif[2] in "ABCDEFGHJNPQRSUVW" else 1
def ambPuntDeMilers(numero) :
return '{:,}'.format(numero).replace(',','.')
db = psycopg2.connect(**config.psycopg)
with db.cursor() as cursor :
cursor.execute("""\
SELECT DISTINCT gi.id, rpa.email, rp.lang
FROM res_partner AS rp, res_partner_address AS rpa, somenergia_soci AS ss, generationkwh_investment as gi
WHERE rp.id = rpa.partner_id AND
rp.id = ss.partner_id AND
ss.id = gi.member_id AND
gi.purchase_date <= '2015-09-15' AND
gi.active = True
;
""")
print u'\t'.join(unicode(x) for x in [
'id',
'email',
'lang',
])
for line in dbutils.fetchNs(cursor) :
try:
print '\t'.join(
str(x)
.replace('\t',' ')
.replace('\n',' ')
.replace('\r',' ')
for x in [
line.id,
line.email,
line.lang,
])
except Exception as e:
import traceback
error("Error processant soci {}\n{}\n{}".format(
line.nsoci,
e,
"\n".join(traceback.format_stack()),
))
error(ns(cas=line).dump())
|
Add script apologize mortizations not sent#!/usr/bin/env python
#-*- coding: utf8 -*-
import psycopg2
import config
import dbutils
import codecs
import sys
from consolemsg import step, error, fail, warn
from namespace import namespace as ns
def esPersonaFisica(soci) :
return 0 if soci.nif[2] in "ABCDEFGHJNPQRSUVW" else 1
def ambPuntDeMilers(numero) :
return '{:,}'.format(numero).replace(',','.')
db = psycopg2.connect(**config.psycopg)
with db.cursor() as cursor :
cursor.execute("""\
SELECT DISTINCT gi.id, rpa.email, rp.lang
FROM res_partner AS rp, res_partner_address AS rpa, somenergia_soci AS ss, generationkwh_investment as gi
WHERE rp.id = rpa.partner_id AND
rp.id = ss.partner_id AND
ss.id = gi.member_id AND
gi.purchase_date <= '2015-09-15' AND
gi.active = True
;
""")
print u'\t'.join(unicode(x) for x in [
'id',
'email',
'lang',
])
for line in dbutils.fetchNs(cursor) :
try:
print '\t'.join(
str(x)
.replace('\t',' ')
.replace('\n',' ')
.replace('\r',' ')
for x in [
line.id,
line.email,
line.lang,
])
except Exception as e:
import traceback
error("Error processant soci {}\n{}\n{}".format(
line.nsoci,
e,
"\n".join(traceback.format_stack()),
))
error(ns(cas=line).dump())
|
<commit_before><commit_msg>Add script apologize mortizations not sent<commit_after>#!/usr/bin/env python
#-*- coding: utf8 -*-
import psycopg2
import config
import dbutils
import codecs
import sys
from consolemsg import step, error, fail, warn
from namespace import namespace as ns
def esPersonaFisica(soci) :
return 0 if soci.nif[2] in "ABCDEFGHJNPQRSUVW" else 1
def ambPuntDeMilers(numero) :
return '{:,}'.format(numero).replace(',','.')
db = psycopg2.connect(**config.psycopg)
with db.cursor() as cursor :
cursor.execute("""\
SELECT DISTINCT gi.id, rpa.email, rp.lang
FROM res_partner AS rp, res_partner_address AS rpa, somenergia_soci AS ss, generationkwh_investment as gi
WHERE rp.id = rpa.partner_id AND
rp.id = ss.partner_id AND
ss.id = gi.member_id AND
gi.purchase_date <= '2015-09-15' AND
gi.active = True
;
""")
print u'\t'.join(unicode(x) for x in [
'id',
'email',
'lang',
])
for line in dbutils.fetchNs(cursor) :
try:
print '\t'.join(
str(x)
.replace('\t',' ')
.replace('\n',' ')
.replace('\r',' ')
for x in [
line.id,
line.email,
line.lang,
])
except Exception as e:
import traceback
error("Error processant soci {}\n{}\n{}".format(
line.nsoci,
e,
"\n".join(traceback.format_stack()),
))
error(ns(cas=line).dump())
|
|
f58a9c6aff57ccd157d8734b6d89411fc29da706
|
src/poliastro/tests/test_patched_conics.py
|
src/poliastro/tests/test_patched_conics.py
|
# coding: utf-8
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Sun, Mercury, Venus, Earth, Moon, Mars
from poliastro.bodies import Jupiter, Saturn, Uranus, Neptune, Pluto
from poliastro.patched_conics import compute_soi
def test_compute_soi():
# Data from Table A.2., Curtis "Orbital Mechanics for Engineering Students"
data = [
# body, SOI radius (m)
# (Sun, None),
(Mercury, 1.12e8),
(Venus, 6.16e8),
(Earth, 9.25e8),
(Moon, 6.61e7),
(Mars, 5.77e8),
(Jupiter, 4.82e10),
(Saturn, 5.48e10),
(Uranus, 5.18e10),
(Neptune, 8.66e10),
(Pluto, 3.08e9)
]
for row in data:
body, expected_r_SOI = row
expected_r_SOI = expected_r_SOI * u.m
r_SOI = compute_soi(body)
assert_quantity_allclose(r_SOI, expected_r_SOI, rtol=1e-6)
|
Add test to r_SOI computation
|
Add test to r_SOI computation
|
Python
|
mit
|
Juanlu001/poliastro,newlawrence/poliastro,poliastro/poliastro,newlawrence/poliastro,anhiga/poliastro,anhiga/poliastro,Juanlu001/poliastro,anhiga/poliastro,Juanlu001/poliastro,newlawrence/poliastro
|
Add test to r_SOI computation
|
# coding: utf-8
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Sun, Mercury, Venus, Earth, Moon, Mars
from poliastro.bodies import Jupiter, Saturn, Uranus, Neptune, Pluto
from poliastro.patched_conics import compute_soi
def test_compute_soi():
# Data from Table A.2., Curtis "Orbital Mechanics for Engineering Students"
data = [
# body, SOI radius (m)
# (Sun, None),
(Mercury, 1.12e8),
(Venus, 6.16e8),
(Earth, 9.25e8),
(Moon, 6.61e7),
(Mars, 5.77e8),
(Jupiter, 4.82e10),
(Saturn, 5.48e10),
(Uranus, 5.18e10),
(Neptune, 8.66e10),
(Pluto, 3.08e9)
]
for row in data:
body, expected_r_SOI = row
expected_r_SOI = expected_r_SOI * u.m
r_SOI = compute_soi(body)
assert_quantity_allclose(r_SOI, expected_r_SOI, rtol=1e-6)
|
<commit_before><commit_msg>Add test to r_SOI computation<commit_after>
|
# coding: utf-8
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Sun, Mercury, Venus, Earth, Moon, Mars
from poliastro.bodies import Jupiter, Saturn, Uranus, Neptune, Pluto
from poliastro.patched_conics import compute_soi
def test_compute_soi():
# Data from Table A.2., Curtis "Orbital Mechanics for Engineering Students"
data = [
# body, SOI radius (m)
# (Sun, None),
(Mercury, 1.12e8),
(Venus, 6.16e8),
(Earth, 9.25e8),
(Moon, 6.61e7),
(Mars, 5.77e8),
(Jupiter, 4.82e10),
(Saturn, 5.48e10),
(Uranus, 5.18e10),
(Neptune, 8.66e10),
(Pluto, 3.08e9)
]
for row in data:
body, expected_r_SOI = row
expected_r_SOI = expected_r_SOI * u.m
r_SOI = compute_soi(body)
assert_quantity_allclose(r_SOI, expected_r_SOI, rtol=1e-6)
|
Add test to r_SOI computation# coding: utf-8
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Sun, Mercury, Venus, Earth, Moon, Mars
from poliastro.bodies import Jupiter, Saturn, Uranus, Neptune, Pluto
from poliastro.patched_conics import compute_soi
def test_compute_soi():
# Data from Table A.2., Curtis "Orbital Mechanics for Engineering Students"
data = [
# body, SOI radius (m)
# (Sun, None),
(Mercury, 1.12e8),
(Venus, 6.16e8),
(Earth, 9.25e8),
(Moon, 6.61e7),
(Mars, 5.77e8),
(Jupiter, 4.82e10),
(Saturn, 5.48e10),
(Uranus, 5.18e10),
(Neptune, 8.66e10),
(Pluto, 3.08e9)
]
for row in data:
body, expected_r_SOI = row
expected_r_SOI = expected_r_SOI * u.m
r_SOI = compute_soi(body)
assert_quantity_allclose(r_SOI, expected_r_SOI, rtol=1e-6)
|
<commit_before><commit_msg>Add test to r_SOI computation<commit_after># coding: utf-8
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Sun, Mercury, Venus, Earth, Moon, Mars
from poliastro.bodies import Jupiter, Saturn, Uranus, Neptune, Pluto
from poliastro.patched_conics import compute_soi
def test_compute_soi():
# Data from Table A.2., Curtis "Orbital Mechanics for Engineering Students"
data = [
# body, SOI radius (m)
# (Sun, None),
(Mercury, 1.12e8),
(Venus, 6.16e8),
(Earth, 9.25e8),
(Moon, 6.61e7),
(Mars, 5.77e8),
(Jupiter, 4.82e10),
(Saturn, 5.48e10),
(Uranus, 5.18e10),
(Neptune, 8.66e10),
(Pluto, 3.08e9)
]
for row in data:
body, expected_r_SOI = row
expected_r_SOI = expected_r_SOI * u.m
r_SOI = compute_soi(body)
assert_quantity_allclose(r_SOI, expected_r_SOI, rtol=1e-6)
|
|
3be77c10bfc10b78f9074e36f8b7ec2132ad4c04
|
coherence/upnp/core/test/test_action.py
|
coherence/upnp/core/test/test_action.py
|
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{upnp.core.action}
"""
import time
from twisted.trial import unittest
from twisted.internet import protocol
from twisted.test import proto_helpers
from coherence.upnp.core import action
class NoImplementation: pass
NoImplementation = NoImplementation()
class DummyService:
pass
class TestArguments(unittest.TestCase):
def test_argument(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.get_name(), 'SomeArgument')
self.assertEqual(arg.get_direction(), 'in-and-out')
self.assertEqual(arg.get_state_variable(), 'Brightness')
def test_argument_as_dict(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_dict(),
{'name': 'SomeArgument',
'direction': 'in-and-out',
'related_state_variable': 'Brightness',
})
def test_argument_as_tuple(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_tuples(),
[('Name', 'SomeArgument'),
('Direction', 'in-and-out'),
('Related State Variable', 'Brightness'),
])
|
Add simple test-cases for upnp.core.action.Argument.
|
Add simple test-cases for upnp.core.action.Argument.
|
Python
|
mit
|
furbrain/Coherence,unintended/Cohen,ismaelgaudioso/Coherence,coherence-project/Coherence,ismaelgaudioso/Coherence,coherence-project/Coherence,unintended/Cohen,furbrain/Coherence
|
Add simple test-cases for upnp.core.action.Argument.
|
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{upnp.core.action}
"""
import time
from twisted.trial import unittest
from twisted.internet import protocol
from twisted.test import proto_helpers
from coherence.upnp.core import action
class NoImplementation: pass
NoImplementation = NoImplementation()
class DummyService:
pass
class TestArguments(unittest.TestCase):
def test_argument(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.get_name(), 'SomeArgument')
self.assertEqual(arg.get_direction(), 'in-and-out')
self.assertEqual(arg.get_state_variable(), 'Brightness')
def test_argument_as_dict(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_dict(),
{'name': 'SomeArgument',
'direction': 'in-and-out',
'related_state_variable': 'Brightness',
})
def test_argument_as_tuple(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_tuples(),
[('Name', 'SomeArgument'),
('Direction', 'in-and-out'),
('Related State Variable', 'Brightness'),
])
|
<commit_before><commit_msg>Add simple test-cases for upnp.core.action.Argument.<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{upnp.core.action}
"""
import time
from twisted.trial import unittest
from twisted.internet import protocol
from twisted.test import proto_helpers
from coherence.upnp.core import action
class NoImplementation: pass
NoImplementation = NoImplementation()
class DummyService:
pass
class TestArguments(unittest.TestCase):
def test_argument(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.get_name(), 'SomeArgument')
self.assertEqual(arg.get_direction(), 'in-and-out')
self.assertEqual(arg.get_state_variable(), 'Brightness')
def test_argument_as_dict(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_dict(),
{'name': 'SomeArgument',
'direction': 'in-and-out',
'related_state_variable': 'Brightness',
})
def test_argument_as_tuple(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_tuples(),
[('Name', 'SomeArgument'),
('Direction', 'in-and-out'),
('Related State Variable', 'Brightness'),
])
|
Add simple test-cases for upnp.core.action.Argument.# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{upnp.core.action}
"""
import time
from twisted.trial import unittest
from twisted.internet import protocol
from twisted.test import proto_helpers
from coherence.upnp.core import action
class NoImplementation: pass
NoImplementation = NoImplementation()
class DummyService:
pass
class TestArguments(unittest.TestCase):
def test_argument(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.get_name(), 'SomeArgument')
self.assertEqual(arg.get_direction(), 'in-and-out')
self.assertEqual(arg.get_state_variable(), 'Brightness')
def test_argument_as_dict(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_dict(),
{'name': 'SomeArgument',
'direction': 'in-and-out',
'related_state_variable': 'Brightness',
})
def test_argument_as_tuple(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_tuples(),
[('Name', 'SomeArgument'),
('Direction', 'in-and-out'),
('Related State Variable', 'Brightness'),
])
|
<commit_before><commit_msg>Add simple test-cases for upnp.core.action.Argument.<commit_after># -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{upnp.core.action}
"""
import time
from twisted.trial import unittest
from twisted.internet import protocol
from twisted.test import proto_helpers
from coherence.upnp.core import action
class NoImplementation: pass
NoImplementation = NoImplementation()
class DummyService:
pass
class TestArguments(unittest.TestCase):
def test_argument(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.get_name(), 'SomeArgument')
self.assertEqual(arg.get_direction(), 'in-and-out')
self.assertEqual(arg.get_state_variable(), 'Brightness')
def test_argument_as_dict(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_dict(),
{'name': 'SomeArgument',
'direction': 'in-and-out',
'related_state_variable': 'Brightness',
})
def test_argument_as_tuple(self):
arg = action.Argument('SomeArgument', 'in-and-out', 'Brightness')
self.assertEqual(arg.as_tuples(),
[('Name', 'SomeArgument'),
('Direction', 'in-and-out'),
('Related State Variable', 'Brightness'),
])
|
|
97a60b5cc41cf6824d45be1e5a1ce48343dced4a
|
apps/feedback/migrations/0002_auto_20150623_2055.py
|
apps/feedback/migrations/0002_auto_20150623_2055.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feedback', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='ratinganswer',
name='answer',
field=models.SmallIntegerField(default=0, verbose_name='karakter', choices=[(b'', b''), (1, b'1'), (2, b'2'), (3, b'3'), (4, b'4'), (5, b'5'), (6, b'6')]),
preserve_default=True,
),
]
|
Add that **** strange feedback migration again. 2nd time that has happened.
|
Add that **** strange feedback migration again. 2nd time that has happened.
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
Add that **** strange feedback migration again. 2nd time that has happened.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feedback', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='ratinganswer',
name='answer',
field=models.SmallIntegerField(default=0, verbose_name='karakter', choices=[(b'', b''), (1, b'1'), (2, b'2'), (3, b'3'), (4, b'4'), (5, b'5'), (6, b'6')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add that **** strange feedback migration again. 2nd time that has happened.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feedback', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='ratinganswer',
name='answer',
field=models.SmallIntegerField(default=0, verbose_name='karakter', choices=[(b'', b''), (1, b'1'), (2, b'2'), (3, b'3'), (4, b'4'), (5, b'5'), (6, b'6')]),
preserve_default=True,
),
]
|
Add that **** strange feedback migration again. 2nd time that has happened.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feedback', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='ratinganswer',
name='answer',
field=models.SmallIntegerField(default=0, verbose_name='karakter', choices=[(b'', b''), (1, b'1'), (2, b'2'), (3, b'3'), (4, b'4'), (5, b'5'), (6, b'6')]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add that **** strange feedback migration again. 2nd time that has happened.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feedback', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='ratinganswer',
name='answer',
field=models.SmallIntegerField(default=0, verbose_name='karakter', choices=[(b'', b''), (1, b'1'), (2, b'2'), (3, b'3'), (4, b'4'), (5, b'5'), (6, b'6')]),
preserve_default=True,
),
]
|
|
6be49752c7e6f34c53229c755285a0e140128a0e
|
bidb/buildinfo/buildinfo_submissions/migrations/0002_remove_submissions_without_keys.py
|
bidb/buildinfo/buildinfo_submissions/migrations/0002_remove_submissions_without_keys.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-03 20:30
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
if not schema_editor.connection.alias == 'default':
return
Submission = apps.get_model('buildinfo_submissions', 'Submission')
Submission.objects.filter(uid='').delete()
class Migration(migrations.Migration):
dependencies = [
('buildinfo_submissions', '0001_initial'),
]
operations = [
migrations.RunPython(forwards),
]
|
Delete submissions with no (ie. blank) uid
|
Delete submissions with no (ie. blank) uid
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
|
Python
|
agpl-3.0
|
lamby/buildinfo.debian.net,lamby/buildinfo.debian.net
|
Delete submissions with no (ie. blank) uid
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-03 20:30
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
if not schema_editor.connection.alias == 'default':
return
Submission = apps.get_model('buildinfo_submissions', 'Submission')
Submission.objects.filter(uid='').delete()
class Migration(migrations.Migration):
dependencies = [
('buildinfo_submissions', '0001_initial'),
]
operations = [
migrations.RunPython(forwards),
]
|
<commit_before><commit_msg>Delete submissions with no (ie. blank) uid
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org><commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-03 20:30
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
if not schema_editor.connection.alias == 'default':
return
Submission = apps.get_model('buildinfo_submissions', 'Submission')
Submission.objects.filter(uid='').delete()
class Migration(migrations.Migration):
dependencies = [
('buildinfo_submissions', '0001_initial'),
]
operations = [
migrations.RunPython(forwards),
]
|
Delete submissions with no (ie. blank) uid
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-03 20:30
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
if not schema_editor.connection.alias == 'default':
return
Submission = apps.get_model('buildinfo_submissions', 'Submission')
Submission.objects.filter(uid='').delete()
class Migration(migrations.Migration):
dependencies = [
('buildinfo_submissions', '0001_initial'),
]
operations = [
migrations.RunPython(forwards),
]
|
<commit_before><commit_msg>Delete submissions with no (ie. blank) uid
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org><commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-03 20:30
from __future__ import unicode_literals
from django.db import migrations
def forwards(apps, schema_editor):
if not schema_editor.connection.alias == 'default':
return
Submission = apps.get_model('buildinfo_submissions', 'Submission')
Submission.objects.filter(uid='').delete()
class Migration(migrations.Migration):
dependencies = [
('buildinfo_submissions', '0001_initial'),
]
operations = [
migrations.RunPython(forwards),
]
|
|
fa0842d36dc4df14a41887e7ff2427142b2f993e
|
tests/template_compile_race.py
|
tests/template_compile_race.py
|
import threading
from wolis.test_case import WolisTestCase
from wolis import utils
thread_count = 5
requests_per_thread = 20
stop = False
failed = False
def test_fn(s, case, url):
global stop, failed
for i in range(requests_per_thread):
if stop:
break
s.get(url)
case.assert_successish(s)
if 'Subject:' not in s.response.body:
failed = True
stop = True
class TemplateCompileRaceTest(WolisTestCase):
def skip_test_race(self):
self.login('morpheus', 'morpheus')
url = '/index.php'
self.get(url)
self.assert_successish()
assert 'Index page' in self.response.body
href = self.link_href_by_text('Your first forum')
url = self.response.urljoin(href)
self.get(url)
self.assert_successish()
href = self.link_href_by_href_match(r'mode=post')
url = self.response.urljoin(href)
threads = []
for i in range(thread_count):
session = self._session.copy()
session.config.retry_failed = True
session.config.retry_condition = utils.retry_condition_fn
thread = threading.Thread(target=test_fn, args=(session, self, url))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
global failed
assert not failed
if __name__ == '__main__':
import unittest
unittest.main()
|
Test for template race condition only
|
Test for template race condition only
|
Python
|
bsd-2-clause
|
p/wolis-phpbb,p/wolis-phpbb
|
Test for template race condition only
|
import threading
from wolis.test_case import WolisTestCase
from wolis import utils
thread_count = 5
requests_per_thread = 20
stop = False
failed = False
def test_fn(s, case, url):
global stop, failed
for i in range(requests_per_thread):
if stop:
break
s.get(url)
case.assert_successish(s)
if 'Subject:' not in s.response.body:
failed = True
stop = True
class TemplateCompileRaceTest(WolisTestCase):
def skip_test_race(self):
self.login('morpheus', 'morpheus')
url = '/index.php'
self.get(url)
self.assert_successish()
assert 'Index page' in self.response.body
href = self.link_href_by_text('Your first forum')
url = self.response.urljoin(href)
self.get(url)
self.assert_successish()
href = self.link_href_by_href_match(r'mode=post')
url = self.response.urljoin(href)
threads = []
for i in range(thread_count):
session = self._session.copy()
session.config.retry_failed = True
session.config.retry_condition = utils.retry_condition_fn
thread = threading.Thread(target=test_fn, args=(session, self, url))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
global failed
assert not failed
if __name__ == '__main__':
import unittest
unittest.main()
|
<commit_before><commit_msg>Test for template race condition only<commit_after>
|
import threading
from wolis.test_case import WolisTestCase
from wolis import utils
thread_count = 5
requests_per_thread = 20
stop = False
failed = False
def test_fn(s, case, url):
global stop, failed
for i in range(requests_per_thread):
if stop:
break
s.get(url)
case.assert_successish(s)
if 'Subject:' not in s.response.body:
failed = True
stop = True
class TemplateCompileRaceTest(WolisTestCase):
def skip_test_race(self):
self.login('morpheus', 'morpheus')
url = '/index.php'
self.get(url)
self.assert_successish()
assert 'Index page' in self.response.body
href = self.link_href_by_text('Your first forum')
url = self.response.urljoin(href)
self.get(url)
self.assert_successish()
href = self.link_href_by_href_match(r'mode=post')
url = self.response.urljoin(href)
threads = []
for i in range(thread_count):
session = self._session.copy()
session.config.retry_failed = True
session.config.retry_condition = utils.retry_condition_fn
thread = threading.Thread(target=test_fn, args=(session, self, url))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
global failed
assert not failed
if __name__ == '__main__':
import unittest
unittest.main()
|
Test for template race condition onlyimport threading
from wolis.test_case import WolisTestCase
from wolis import utils
thread_count = 5
requests_per_thread = 20
stop = False
failed = False
def test_fn(s, case, url):
global stop, failed
for i in range(requests_per_thread):
if stop:
break
s.get(url)
case.assert_successish(s)
if 'Subject:' not in s.response.body:
failed = True
stop = True
class TemplateCompileRaceTest(WolisTestCase):
def skip_test_race(self):
self.login('morpheus', 'morpheus')
url = '/index.php'
self.get(url)
self.assert_successish()
assert 'Index page' in self.response.body
href = self.link_href_by_text('Your first forum')
url = self.response.urljoin(href)
self.get(url)
self.assert_successish()
href = self.link_href_by_href_match(r'mode=post')
url = self.response.urljoin(href)
threads = []
for i in range(thread_count):
session = self._session.copy()
session.config.retry_failed = True
session.config.retry_condition = utils.retry_condition_fn
thread = threading.Thread(target=test_fn, args=(session, self, url))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
global failed
assert not failed
if __name__ == '__main__':
import unittest
unittest.main()
|
<commit_before><commit_msg>Test for template race condition only<commit_after>import threading
from wolis.test_case import WolisTestCase
from wolis import utils
thread_count = 5
requests_per_thread = 20
stop = False
failed = False
def test_fn(s, case, url):
global stop, failed
for i in range(requests_per_thread):
if stop:
break
s.get(url)
case.assert_successish(s)
if 'Subject:' not in s.response.body:
failed = True
stop = True
class TemplateCompileRaceTest(WolisTestCase):
def skip_test_race(self):
self.login('morpheus', 'morpheus')
url = '/index.php'
self.get(url)
self.assert_successish()
assert 'Index page' in self.response.body
href = self.link_href_by_text('Your first forum')
url = self.response.urljoin(href)
self.get(url)
self.assert_successish()
href = self.link_href_by_href_match(r'mode=post')
url = self.response.urljoin(href)
threads = []
for i in range(thread_count):
session = self._session.copy()
session.config.retry_failed = True
session.config.retry_condition = utils.retry_condition_fn
thread = threading.Thread(target=test_fn, args=(session, self, url))
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
global failed
assert not failed
if __name__ == '__main__':
import unittest
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.