Dataset schema (one row per commit):

| column | type | stats |
|---|---|---|
| `commit` | string | lengths 40–40 |
| `old_file` | string | lengths 4–118 |
| `new_file` | string | lengths 4–118 |
| `old_contents` | string | lengths 0–2.94k |
| `new_contents` | string | lengths 1–4.43k |
| `subject` | string | lengths 15–444 |
| `message` | string | lengths 16–3.45k |
| `lang` | string | 1 class |
| `license` | string | 13 classes |
| `repos` | string | lengths 5–43.2k |
| `prompt` | string | lengths 17–4.58k |
| `response` | string | lengths 1–4.43k |
| `prompt_tagged` | string | lengths 58–4.62k |
| `response_tagged` | string | lengths 1–4.43k |
| `text` | string | lengths 132–7.29k |
| `text_tagged` | string | lengths 173–7.33k |

The last six columns are derived from the raw commit fields: `prompt` is `old_contents` followed by the commit message, `response` is `new_contents`, `text` concatenates `old_contents`, the message, and `new_contents`, and the `*_tagged` variants wrap the same material in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. Sample rows follow, one record per commit.
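As a rough orientation, rows of this shape can be inspected with the `datasets` library. This is a minimal sketch, assuming the rows have been exported to a local JSON-lines file; the `commits.jsonl` path is hypothetical:

```python
from datasets import load_dataset

# Hypothetical local export of the table above; substitute the real location.
ds = load_dataset("json", data_files="commits.jsonl", split="train")

row = ds[0]
print(row["commit"], row["new_file"])  # commit hash and touched file
print(row["subject"])                  # first line of the commit message
print(row["new_contents"][:120])       # head of the post-commit file body
```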
---

**commit `480852bb1dd6796b7fb12e40edc924b9a4dbee60`**

- file: `test/test_misc.py` (new file)
- subject/message: Add tests to cover no framework, no problem
- lang: Python, license: mpl-2.0
- repos: undertherain/benchmarker
- prompt_tagged: `<commit_before><commit_msg>Add tests to cover no framework, no problem<commit_after>`

`new_contents`:

```python
import unittest

from .helpers import run_module


class MiscTests(unittest.TestCase):
    def setUp(self):
        self.name = "benchmarker"

    def test_no_framework(self):
        with self.assertRaises(Exception):
            run_module(self.name)

    def test_no_problem(self):
        with self.assertRaises(Exception):
            run_module(self.name, "--framework=pytorch")
```
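The `prompt_tagged` value above shows the control-token template used throughout the table. A small sketch of how such a field can be assembled from the raw columns; the helper name is my own:

```python
def build_text_tagged(old_contents, message, new_contents):
    # Mirrors the template visible in the *_tagged columns:
    # <commit_before>{old}<commit_msg>{message}<commit_after>{new}
    return (
        "<commit_before>" + old_contents
        + "<commit_msg>" + message
        + "<commit_after>" + new_contents
    )

# For a newly added file, old_contents is the empty string:
print(build_text_tagged("", "Add tests to cover no framework, no problem",
                        "import unittest\n..."))
```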
---

**commit `3b66fbc844b023003420db7a9986811110f55489`**

- file: `tests/test_run.py` (new file)
- subject/message: Add tests for the run() function
- lang: Python, license: mit
- repos: Pythonity/icon-font-to-png

`new_contents`:

```python
import sys
import tempfile
import unittest
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

import icon_font_to_png


class TestRun(unittest.TestCase):
    def create_css_file(self, contents):
        css_file = tempfile.NamedTemporaryFile()
        css_file.write(contents.encode('utf-8'))
        css_file.flush()
        return css_file

    def test_usage(self):
        orig_stderr = sys.stderr
        sys.stderr = StringIO()
        self.assertRaises(SystemExit, icon_font_to_png.run,
                          ['icon_font_to_png.py'])
        err = sys.stderr.getvalue().strip()
        self.assertRegexpMatches(err, '^usage: .*')
        sys.stderr = orig_stderr

    def test_list(self):
        css_file = self.create_css_file(
            ".foo-xyzzy:before { content: '\\f003' }\n"
            ".foo-baz:before { content: '\\f002' }\n"
            ".foo-bar:before { content: '\\f001' }\n"
        )
        orig_stdout = sys.stdout
        sys.stdout = StringIO()
        self.assertRaisesRegexp(SystemExit, '^0',
            icon_font_to_png.run, ['foo.ttf', css_file.name, 'bar', '--list'])
        out = sys.stdout.getvalue()
        self.assertEqual(out,
            "bar\n"
            "baz\n"
            "xyzzy\n"
        )
        sys.stdout = StringIO()
        self.assertRaisesRegexp(SystemExit, '^0',
            icon_font_to_png.run, ['foo.ttf', css_file.name, 'bar', '--list',
                                   '--keep-prefix'])
        out = sys.stdout.getvalue()
        self.assertEqual(out,
            "foo-bar\n"
            "foo-baz\n"
            "foo-xyzzy\n"
        )
        sys.stdout = orig_stdout


if __name__ == '__main__':
    unittest.main
```
---

**commit `578de6c57f9698c7e273af06d1e815f71269bb18`**

- file: `tests/to_debug.py` (new file)
- subject/message: Add a sample python file interesting to debug
- lang: Python, license: mit
- repos: audaxis/ikpdb

`new_contents`:

```python
import sys
import os
import time
import threading

import ikpdb

TEST_MULTI_THREADING = False
TEST_EXCEPTION_PROPAGATION = False
TEST_POSTMORTEM = True
TEST_SYS_EXIT = 0
TEST_STEPPING = False
# Note that ikpdb.set_trace() will reset/mess breakpoints set using GUI
TEST_SET_TRACE = False
TCB = TEST_CONDITIONAL_BREAKPOINT = True


class Worker(object):
    def __init__(self):
        self._running = True

    def terminate(self):
        self._running = False

    def run(self, n):
        work_count = n
        while self._running and n > 0:
            print "Worker: Doing iteration: %s" % (work_count - n)
            if n == 3:
                pass  # ikpdb.set_trace()
            n -= 1
            time.sleep(2)

ga = 5
gb = "coucou"
g_dict = {"Genesis": 1, "Don't Look Back": 2, 'array': [1, 3, {'coucou': 3.14}]}
a_tuple = (1, 'e', 3.14, ['a', 'b'])


class BigBear:
    color = "white"

    def __init__(self, name='unknown'):
        self._name = name

    def grumble(self):
        print "Roaaarrrrrrr"


def sub_function():
    return True


def the_function(p_nb_seconds):
    a_var = 18.3
    the_function_local_list = [1, 2, 3, 'cyril']
    a_beast = BigBear()
    print "ga=%s" % ga
    print "Hello World"
    print "Ceci est la ligne avec le point d'arret"
    for loop_idx in range(p_nb_seconds):
        print "hello @ %s seconds" % loop_idx
        time.sleep(1)
        if loop_idx == 12:
            if TEST_SET_TRACE:
                ikpdb.set_trace()  # will break on next line
            pass  # Need this for set_trace()
    a_var = 98.3
    sub_function()


def sub_raiser():
    raise Exception("Prends ca dans ta bouille")


def raiser():
    try:
        sub_raiser()
    except Exception as e:
        raise e


if __name__ == '__main__':
    b = 0
    main_bear = BigBear("Cyril")
    print "Type of main_bear=%s" % type(main_bear)
    print "sys.argv=%s" % sys.argv
    if TEST_SYS_EXIT:
        sys.exit(TEST_SYS_EXIT)
    if TEST_EXCEPTION_PROPAGATION:
        raiser()
    if TEST_MULTI_THREADING:
        w = Worker()
        t = threading.Thread(target=w.run, args=(5,))
        t.start()
    duration = 2 if TEST_STEPPING else 15
    the_function(duration)
    if TEST_MULTI_THREADING:
        w.terminate()
        t.join()
    print "finished"
    if TEST_POSTMORTEM:
        print 5 / b
```
---

**commit `f6f2c6fc2a51bb3243d9b99ab1093809a2d1a5bb`**

- file: `test_players.py` (new file)
- subject/message: Add script that tests AI players
- lang: Python, license: mit
- repos: giovannipcarvalho/dots-and-boxes

`new_contents`:

```python
from AI import *
import random


def RandomPlayer(game):
    return 0, random.choice(game.get_available_moves())


def ABPlayer(game):
    return alpha_beta_search(game, 8, -np.inf, np.inf, True, evaluate_base)


def ABChainPlayer1(game):
    return alpha_beta_search(game, 7, -np.inf, np.inf, True, evaluate_chain_len)


def ABChainPlayer2(game):
    return alpha_beta_search(game, 7, -np.inf, np.inf, True, evaluate_chain_count)


players = [ABChainPlayer2, ABPlayer]
player_names = tuple((map(lambda x: x.__name__, players)))

print "%s v. %s" % player_names
game = DotsAndBoxes(-1, 4, 4)
while not game.is_over():
    play_fn = players[game.turn == 1]
    print "\tTurn: %s" % (player_names[game.turn == 1])
    score, move = play_fn(game)
    game.play(move)
    print "\tPlayed: %d %d" % (move)
    print "\tEvaluated score: %d\n" % (score)

print "Winner: %s" % (player_names[np.argmax(game.score)])
print game.score
```
---

**commit `da2b773bf6e669b3ec50bbd6af73e1d80bb0b5a5`**

- file: `tsstats/events.py` (new file)
- subject/message: Add tsstats/event.py for easy event-initialization
- lang: Python, license: mit
- repos: Thor77/TeamspeakStats

`new_contents`:

```python
from collections import namedtuple

Event = namedtuple(
    'Event', ['timestamp', 'identifier', 'action', 'arg', 'arg_is_client']
)


def nick(timestamp, identifier, nick):
    return Event(timestamp, identifier, 'set_nick', nick, arg_is_client=False)


def connect(timestamp, identifier):
    return Event(
        timestamp, identifier, 'connect', arg=timestamp, arg_is_client=False
    )


def disconnect(timestamp, identifier):
    return Event(
        timestamp, identifier, 'disconnect', arg=timestamp, arg_is_client=False
    )


def kick(timestamp, identifier, target_identifier):
    return Event(
        timestamp, identifier, 'kick', target_identifier, arg_is_client=True
    )


def ban(timestamp, identifier, target_identifier):
    return Event(
        timestamp, identifier, 'ban', target_identifier, arg_is_client=True
    )
```
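A short usage sketch of the constructors above; the sample identifiers and timestamp are made up:

```python
from datetime import datetime

from tsstats.events import kick, nick  # assuming the module above is importable

# Each helper returns a plain Event namedtuple; nothing is persisted here.
ts = datetime(2016, 1, 1, 12, 0)
print(nick(ts, 42, 'Thor'))  # Event(..., action='set_nick', arg='Thor', arg_is_client=False)
print(kick(ts, 42, 99))      # Event(..., action='kick', arg=99, arg_is_client=True)
```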
---

**commit `c0b05a43e10693f8aab87a7f86726d512b7494fc`**

- file: `bluebottle/clients/management/commands/export_tenants.py` (new file)
- subject/message: Add tenant exporter for accounting
- lang: Python, license: bsd-3-clause
- repos: onepercentclub/bluebottle

`new_contents`:

```python
import json

from rest_framework.authtoken.models import Token

from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand

from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant


class Command(BaseCommand):
    help = 'Export tenants, so that we can import them into the accounting app'

    def add_arguments(self, parser):
        parser.add_argument('--file', type=str, default=None, action='store')

    def handle(self, *args, **options):
        results = []
        for client in Client.objects.all():
            properties.set_tenant(client)
            with LocalTenant(client, clear_tenant=True):
                ContentType.objects.clear_cache()
                accounts = []
                for merchant in properties.MERCHANT_ACCOUNTS:
                    if merchant['merchant'] == 'docdata':
                        accounts.append(
                            {
                                'service_type': 'docdata',
                                'username': merchant['merchant_name']
                            }
                        )
                api_key = Token.objects.get(user__username='accounting').key
                results.append({
                    "name": client.schema_name,
                    "domain": properties.TENANT_MAIL_PROPERTIES['website'],
                    "api_key": api_key,
                    "accounts": accounts
                })
        if options['file']:
            text_file = open(options['file'], "w")
            text_file.write(json.dumps(results))
            text_file.close()
        else:
            print json.dumps(results)
```
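Given its location under `bluebottle/clients/management/commands/`, Django would expose this as a management command, presumably invoked as `python manage.py export_tenants --file tenants.json`.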
---

**commit `dfbf888ca0b56448a4f211900b16e3c85648b241`**

- file: `editorsnotes/main/migrations/0025_auto_20160628_0913.py` (new file)
- subject: Add migration for changing docstring of Note.is_private to unicode
- message: Add migration for changing docstring of Note.is_private to unicode (instead of a bytes)
- lang: Python, license: agpl-3.0
- repos: editorsnotes/editorsnotes

`new_contents`:

```python
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-06-28 09:13
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0024_topic_ld'),
    ]

    operations = [
        migrations.AlterField(
            model_name='note',
            name='is_private',
            field=models.BooleanField(default=False, help_text="If true, will only be be viewable to users who belong to the note's project."),
        ),
    ]
```
---

**commit `25ff8c6f8bc9d70886d004f8b64f08facb8c12cf`**

- file: `leetcode/277-Find-the-Celebrity/FindtheCelebrity_sol.py` (new file)
- subject/message: Create Find the Celebrity sol for Leetcode
- lang: Python, license: mit
- repos: Chasego/cod, cc13ny/algo, cc13ny/Allin, Chasego/codirit, Chasego/codi

`new_contents`:

```python
# The knows API is already defined for you.
# @param a, person a
# @param b, person b
# @return a boolean, whether a knows b
# def knows(a, b):

class Solution(object):
    def findCelebrity(self, n):
        """
        :type n: int
        :rtype: int
        """
        if n < 2:
            return -1
        candidate = 0
        for i in range(1, n):
            if not knows(i, candidate):
                candidate = i
        for i in range(n):
            if i == candidate:
                continue
            if not knows(i, candidate) or knows(candidate, i):
                return -1
        return candidate
```
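LeetCode supplies `knows` at judge time. A self-contained sketch that backs it with a small adjacency matrix (the matrix is invented) shows the two-pass elimination at work:

```python
# 3 people; person 1 is the celebrity: everyone knows 1, 1 knows nobody.
MATRIX = [
    [False, True,  False],
    [False, False, False],
    [True,  True,  False],
]

def knows(a, b):
    # knows(a, b) is true when person a knows person b.
    return MATRIX[a][b]

# Pass 1 eliminates non-celebrities down to one candidate;
# pass 2 verifies the candidate against everyone else.
print(Solution().findCelebrity(3))  # -> 1
```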
---

**commit `1a4052deb8e0ab2deb7038220ae23d7bb9311ce9`**

- file: `ovf_to_facter.py` (new file)
- subject/message: Add initial version of the script
- lang: Python, license: mit
- repos: agarstang/vmware-ofv-to-facter

`new_contents`:

```python
#!/usr/bin/python

# stdlib
import json
import os
import subprocess
from xml.dom.minidom import parseString


def which(cmd):
    """Python implementation of `which` command."""
    for path in os.environ["PATH"].split(os.pathsep):
        file = os.path.join(path, cmd)
        if os.path.exists(file) and os.access(file, os.X_OK):
            return file
        elif os.name == "nt":
            for ext in os.environ["PATHEXT"].split(os.pathsep):
                full = file + ext
                if os.path.exists(full) and os.access(full, os.X_OK):
                    return full
    return None

FACTER = which("facter")
VMTOOLS = which("vmtoolsd")


def facter(*args):
    facts = json.loads(subprocess.check_output(
        [FACTER, '--json', '--no-external'] + [arg for arg in args]))
    return facts


def findXmlSection(dom, sectionName):
    sections = dom.getElementsByTagName(sectionName)
    return sections[0]


def getOVFProperties(ovfEnv):
    dom = parseString(ovfEnv)
    section = findXmlSection(dom, "PropertySection")
    propertyMap = {}
    for property in section.getElementsByTagName("Property"):
        key = property.getAttribute("oe:key")
        value = property.getAttribute("oe:value")
        propertyMap[key] = value
    dom.unlink()
    return propertyMap


def getVMWareOvfEnv():
    if VMTOOLS == None:
        raise Exception("VMWare Tools not installed.")
    try:
        ovf = subprocess.check_output(
            [VMTOOLS, '--cmd', 'info-get guestinfo.ovfenv'],
            stderr=subprocess.STDOUT)
        properties = getOVFProperties(ovf)
        print "ovf=true"
        for key, value in properties.iteritems():
            print "ovf_" + key + "=" + value
    except:
        print "ovf=false"
    return

if __name__ == "__main__":
    facts = facter("is_virtual", "virtual")
    if (facts['is_virtual'] == 'true') and (facts['virtual'] == 'vmware'):
        getVMWareOvfEnv()
```
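A minimal sketch of the same parsing approach on a hypothetical ovfenv payload of the kind `vmtoolsd` returns; the property names and values below are invented:

```python
from xml.dom.minidom import parseString

SAMPLE_OVF = """<?xml version="1.0"?>
<Environment xmlns:oe="http://schemas.dmtf.org/ovf/environment/1">
  <PropertySection>
    <Property oe:key="hostname" oe:value="web01"/>
    <Property oe:key="domain" oe:value="example.org"/>
  </PropertySection>
</Environment>"""

dom = parseString(SAMPLE_OVF)
section = dom.getElementsByTagName("PropertySection")[0]
for prop in section.getElementsByTagName("Property"):
    # Facter consumes plain key=value lines on stdout as external facts.
    print("ovf_%s=%s" % (prop.getAttribute("oe:key"), prop.getAttribute("oe:value")))
dom.unlink()
```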
---

**commit `1396ff4ab4e6664c265f97958951815a525f7823`**

- file: `reddit_donate/pages.py` (modified)
- subject/message: Remove confusing navigation tabs from header.
- lang: Python, license: bsd-3-clause
- repos: reddit/reddit-plugin-donate, madbook/reddit-plugin-donate

`old_contents`:

```python
from r2.lib.pages import Reddit
from r2.lib.wrapped import Templated


class DonatePage(Reddit):
    extra_stylesheets = Reddit.extra_stylesheets + ["donate.less"]

    def __init__(self, title, content, **kwargs):
        Reddit.__init__(
            self,
            title=title,
            content=content,
            show_sidebar=False,
            **kwargs
        )


class DonateLanding(Templated):
    pass
```

`new_contents` (adds `build_toolbars`):

```python
from r2.lib.pages import Reddit
from r2.lib.wrapped import Templated


class DonatePage(Reddit):
    extra_stylesheets = Reddit.extra_stylesheets + ["donate.less"]

    def __init__(self, title, content, **kwargs):
        Reddit.__init__(
            self,
            title=title,
            content=content,
            show_sidebar=False,
            **kwargs
        )

    def build_toolbars(self):
        # get rid of tabs on the top
        return []


class DonateLanding(Templated):
    pass
```
---

**commit `4db13bdab18934bebcfe5b102044f936e0eab892`**

- file: `etc/component_list.py` (new file)
- subject/message: Add a place to put random stuff and a list of components as a python module.
- lang: Python, license: apache-2.0
- repos: vitorio/ocropodium

`new_contents`:

```python
COMPONENTS = [
    "AdaBoost",
    "AutoInvert",
    "AutoMlpClassifier",
    "BiggestCcExtractor",
    "BinarizeByHT",
    "BinarizeByOtsu",
    "BinarizeByRange",
    "BinarizeBySauvola",
    "BitDataset",
    "BitNN",
    "BookStore",
    "CascadedMLP",
    "CenterFeatureMap",
    "ConnectedComponentSegmenter",
    "CurvedCutSegmenter",
    "CurvedCutWithCcSegmenter",
    "Degradation",
    "DeskewGrayPageByRAST",
    "DeskewPageByRAST",
    "DocClean",
    "DpSegmenter",
    "EnetClassifier",
    "EuclideanDistances",
    "KnnClassifier",
    "LatinClassifier",
    "Linerec",
    "LinerecExtracted",
    "MetaLinerec",
    "NullLinerec",
    "OcroFST",
    "OldBookStore",
    "PageFrameRAST",
    "Pages",
    "RaggedDataset8",
    "RaveledExtractor",
    "RmBig",
    "RmHalftone",
    "RmUnderline",
    "RowDataset8",
    "ScaledImageExtractor",
    "SegmentLineByCCS",
    "SegmentLineByGCCS",
    "SegmentLineByProjection",
    "SegmentPageBy1CP",
    "SegmentPageByMorphTrivial",
    "SegmentPageByRAST",
    "SegmentPageByRAST1",
    "SegmentPageByVORONOI",
    "SegmentPageByXYCUTS",
    "SegmentWords",
    "SimpleFeatureMap",
    "SimpleGrouper",
    "SkelSegmenter",
    "SmartBookStore",
    "SqliteBuffer",
    "SqliteDataset",
    "StandardExtractor",
    "StandardGrouper",
    "StandardPreprocessing",
    "TextImageSegByLogReg",
    "adaboost",
    "biggestcc",
    "bitdataset",
    "bitnn",
    "cfmap",
    "cmlp",
    "dpseg",
    "edist",
    "enet",
    "knn",
    "latin",
    "linerec",
    "linerec_extracted",
    "mappedmlp",
    "metalinerec",
    "mlp",
    "nulllinerec",
    "raggeddataset8",
    "raveledfe",
    "rowdataset8",
    "scaledfe",
    "sfmap",
    "simplegrouper",
    "sqlitebuffer",
    "sqliteds",
]
```
"SegmentLineByCCS",
"SegmentLineByGCCS",
"SegmentLineByProjection",
"SegmentPageBy1CP",
"SegmentPageByMorphTrivial",
"SegmentPageByRAST",
"SegmentPageByRAST1",
"SegmentPageByVORONOI",
"SegmentPageByXYCUTS",
"SegmentWords",
"SimpleFeatureMap",
"SimpleGrouper",
"SkelSegmenter",
"SmartBookStore",
"SqliteBuffer",
"SqliteDataset",
"StandardExtractor",
"StandardGrouper",
"StandardPreprocessing",
"TextImageSegByLogReg",
"adaboost",
"biggestcc",
"bitdataset",
"bitnn",
"cfmap",
"cmlp",
"dpseg",
"edist",
"enet",
"knn",
"latin",
"linerec",
"linerec_extracted",
"mappedmlp",
"metalinerec",
"mlp",
"nulllinerec",
"raggeddataset8",
"raveledfe",
"rowdataset8",
"scaledfe",
"sfmap",
"simplegrouper",
"sqlitebuffer",
"sqliteds",
]
|
<commit_before><commit_msg>Add a place to put random stuff and a list of components as a python module.<commit_after>COMPONENTS = [
"AdaBoost",
"AutoInvert",
"AutoMlpClassifier",
"BiggestCcExtractor",
"BinarizeByHT",
"BinarizeByOtsu",
"BinarizeByRange",
"BinarizeBySauvola",
"BitDataset",
"BitNN",
"BookStore",
"CascadedMLP",
"CenterFeatureMap",
"ConnectedComponentSegmenter",
"CurvedCutSegmenter",
"CurvedCutWithCcSegmenter",
"Degradation",
"DeskewGrayPageByRAST",
"DeskewPageByRAST",
"DocClean",
"DpSegmenter",
"EnetClassifier",
"EuclideanDistances",
"KnnClassifier",
"LatinClassifier",
"Linerec",
"LinerecExtracted",
"MetaLinerec",
"NullLinerec",
"OcroFST",
"OldBookStore",
"PageFrameRAST",
"Pages",
"RaggedDataset8",
"RaveledExtractor",
"RmBig",
"RmHalftone",
"RmUnderline",
"RowDataset8",
"ScaledImageExtractor",
"SegmentLineByCCS",
"SegmentLineByGCCS",
"SegmentLineByProjection",
"SegmentPageBy1CP",
"SegmentPageByMorphTrivial",
"SegmentPageByRAST",
"SegmentPageByRAST1",
"SegmentPageByVORONOI",
"SegmentPageByXYCUTS",
"SegmentWords",
"SimpleFeatureMap",
"SimpleGrouper",
"SkelSegmenter",
"SmartBookStore",
"SqliteBuffer",
"SqliteDataset",
"StandardExtractor",
"StandardGrouper",
"StandardPreprocessing",
"TextImageSegByLogReg",
"adaboost",
"biggestcc",
"bitdataset",
"bitnn",
"cfmap",
"cmlp",
"dpseg",
"edist",
"enet",
"knn",
"latin",
"linerec",
"linerec_extracted",
"mappedmlp",
"metalinerec",
"mlp",
"nulllinerec",
"raggeddataset8",
"raveledfe",
"rowdataset8",
"scaledfe",
"sfmap",
"simplegrouper",
"sqlitebuffer",
"sqliteds",
]
|
|
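A registry module like the one above is usually consumed for validation and lookup; a short hedged sketch, assuming the file is importable as component_list:

from component_list import COMPONENTS

_KNOWN = frozenset(COMPONENTS)

def is_known_component(name):
    # O(1) membership test against the frozen registry
    return name in _KNOWN

assert is_known_component("SegmentPageByRAST")
assert not is_known_component("NoSuchComponent")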
0ede4e22370a3f8217fee8ff995a9c7057d8b00b
|
vumi_http_retry/tests/test_redis.py
|
vumi_http_retry/tests/test_redis.py
|
import json
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from vumi_http_retry.tests.redis import create_client, zitems
class TestRedis(TestCase):
@inlineCallbacks
def setUp(self):
self.redis = yield create_client()
@inlineCallbacks
def tearDown(self):
yield self.redis.delete('foo')
yield self.redis.transport.loseConnection()
@inlineCallbacks
def test_add_request(self):
self.assertEqual((yield zitems(self.redis, 'foo')), [])
yield self.redis.zadd('foo', 1, json.dumps({'bar': 23}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
])
yield self.redis.zadd('foo', 2, json.dumps({'baz': 42}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
(2, {'baz': 42}),
])
|
Add test for redis test helper
|
Add test for redis test helper
|
Python
|
bsd-3-clause
|
praekelt/vumi-http-retry-api,praekelt/vumi-http-retry-api
|
Add test for redis test helper
|
import json
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from vumi_http_retry.tests.redis import create_client, zitems
class TestRedis(TestCase):
@inlineCallbacks
def setUp(self):
self.redis = yield create_client()
@inlineCallbacks
def tearDown(self):
yield self.redis.delete('foo')
yield self.redis.transport.loseConnection()
@inlineCallbacks
def test_add_request(self):
self.assertEqual((yield zitems(self.redis, 'foo')), [])
yield self.redis.zadd('foo', 1, json.dumps({'bar': 23}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
])
yield self.redis.zadd('foo', 2, json.dumps({'baz': 42}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
(2, {'baz': 42}),
])
|
<commit_before><commit_msg>Add test for redis test helper<commit_after>
|
import json
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from vumi_http_retry.tests.redis import create_client, zitems
class TestRedis(TestCase):
@inlineCallbacks
def setUp(self):
self.redis = yield create_client()
@inlineCallbacks
def tearDown(self):
yield self.redis.delete('foo')
yield self.redis.transport.loseConnection()
@inlineCallbacks
def test_add_request(self):
self.assertEqual((yield zitems(self.redis, 'foo')), [])
yield self.redis.zadd('foo', 1, json.dumps({'bar': 23}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
])
yield self.redis.zadd('foo', 2, json.dumps({'baz': 42}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
(2, {'baz': 42}),
])
|
Add test for redis test helperimport json
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from vumi_http_retry.tests.redis import create_client, zitems
class TestRedis(TestCase):
@inlineCallbacks
def setUp(self):
self.redis = yield create_client()
@inlineCallbacks
def tearDown(self):
yield self.redis.delete('foo')
yield self.redis.transport.loseConnection()
@inlineCallbacks
def test_add_request(self):
self.assertEqual((yield zitems(self.redis, 'foo')), [])
yield self.redis.zadd('foo', 1, json.dumps({'bar': 23}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
])
yield self.redis.zadd('foo', 2, json.dumps({'baz': 42}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
(2, {'baz': 42}),
])
|
<commit_before><commit_msg>Add test for redis test helper<commit_after>import json
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from vumi_http_retry.tests.redis import create_client, zitems
class TestRedis(TestCase):
@inlineCallbacks
def setUp(self):
self.redis = yield create_client()
@inlineCallbacks
def tearDown(self):
yield self.redis.delete('foo')
yield self.redis.transport.loseConnection()
@inlineCallbacks
def test_add_request(self):
self.assertEqual((yield zitems(self.redis, 'foo')), [])
yield self.redis.zadd('foo', 1, json.dumps({'bar': 23}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
])
yield self.redis.zadd('foo', 2, json.dumps({'baz': 42}))
self.assertEqual((yield zitems(self.redis, 'foo')), [
(1, {'bar': 23}),
(2, {'baz': 42}),
])
|
|
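The tests above depend on a zitems helper that the record does not show. One plausible shape, offered strictly as a hedged sketch: it assumes a txredisapi-style client whose zrange(..., withscores=True) yields (member, score) pairs, and the project's real helper may differ.

import json
from twisted.internet.defer import inlineCallbacks, returnValue

@inlineCallbacks
def zitems(redis, key):
    # read the whole sorted set with scores and decode each JSON member
    pairs = yield redis.zrange(key, 0, -1, withscores=True)
    returnValue([(score, json.loads(member)) for member, score in pairs])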
fa55ceb71ff254f8ed3413a35acfe20da7c03a91
|
rxbtcomm.py
|
rxbtcomm.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com>
# See LICENSE for details.
import bluetooth
import logging
class RxBtComm(object):
"""BT communication wrapper:
Attributes:
        addr: A string representing the device address.
name: A string representing the device name.
"""
logging.basicConfig(level=logging.DEBUG)
def __init__(self, addr, name=None):
"""Return a RxBtComm object
param *addr* device address
param *name* device name
"""
self.addr = addr
self.name = name
self.sock = None
"""connect:
Connect to BT addr
"""
def connect(self):
try:
port = 1
self.sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
self.sock.connect((self.addr, port))
return bluetooth.lookup_name(self.addr)
except Exception as e:
logging.exception(e)
return ''
"""disconnect:
Disconnect from BT addr
"""
def disconnect(self):
try:
self.sock.close()
except Exception as e:
logging.exception(e)
self.sock = None
"""send:
Send a command to host
"""
def send(self, cmd):
self.sock.send(cmd)
"""recieve:
Recieve a response from host
"""
def recieve(self):
self.sock.recieve(cmd)
### Replace xx:xx:xx:xx:xx:xx with your test device address
#test = RxBtComm('xx:xx:xx:xx:xx:xx', 'Test Device')
#test.connect()
#test.send('date')
#test.disconnect()
|
Create BT Comm wrapper class
|
Create BT Comm wrapper class
|
Python
|
apache-2.0
|
javatechs/RxCmd,javatechs/RxCmd,javatechs/RxCmd
|
Create BT Comm wrapper class
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com>
# See LICENSE for details.
import bluetooth
import logging
class RxBtComm(object):
"""BT communication wrapper:
Attributes:
        addr: A string representing the device address.
name: A string representing the device name.
"""
logging.basicConfig(level=logging.DEBUG)
def __init__(self, addr, name=None):
"""Return a RxBtComm object
param *addr* device address
param *name* device name
"""
self.addr = addr
self.name = name
self.sock = None
"""connect:
Connect to BT addr
"""
def connect(self):
try:
port = 1
self.sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
self.sock.connect((self.addr, port))
return bluetooth.lookup_name(self.addr)
except Exception as e:
logging.exception(e)
return ''
"""disconnect:
Disconnect from BT addr
"""
def disconnect(self):
try:
self.sock.close()
except Exception as e:
logging.exception(e)
self.sock = None
"""send:
Send a command to host
"""
def send(self, cmd):
self.sock.send(cmd)
"""recieve:
Recieve a response from host
"""
def recieve(self):
self.sock.recieve(cmd)
### Replace xx:xx:xx:xx:xx:xx with your test device address
#test = RxBtComm('xx:xx:xx:xx:xx:xx', 'Test Device')
#test.connect()
#test.send('date')
#test.disconnect()
|
<commit_before><commit_msg>Create BT Comm wrapper class<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com>
# See LICENSE for details.
import bluetooth
import logging
class RxBtComm(object):
"""BT communication wrapper:
Attributes:
        addr: A string representing the device address.
name: A string representing the device name.
"""
logging.basicConfig(level=logging.DEBUG)
def __init__(self, addr, name=None):
"""Return a RxBtComm object
param *addr* device address
param *name* device name
"""
self.addr = addr
self.name = name
self.sock = None
"""connect:
Connect to BT addr
"""
def connect(self):
try:
port = 1
self.sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
self.sock.connect((self.addr, port))
return bluetooth.lookup_name(self.addr)
except Exception as e:
logging.exception(e)
return ''
"""disconnect:
Disconnect from BT addr
"""
def disconnect(self):
try:
self.sock.close()
except Exception as e:
logging.exception(e)
self.sock = None
"""send:
Send a command to host
"""
def send(self, cmd):
self.sock.send(cmd)
"""recieve:
Recieve a response from host
"""
def recieve(self):
self.sock.recieve(cmd)
### Replace xx:xx:xx:xx:xx:xx with your test device address
#test = RxBtComm('xx:xx:xx:xx:xx:xx', 'Test Device')
#test.connect()
#test.send('date')
#test.disconnect()
|
Create BT Comm wrapper class#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com>
# See LICENSE for details.
import bluetooth
import logging
class RxBtComm(object):
"""BT communication wrapper:
Attributes:
        addr: A string representing the device address.
name: A string representing the device name.
"""
logging.basicConfig(level=logging.DEBUG)
def __init__(self, addr, name=None):
"""Return a RxBtComm object
param *addr* device address
param *name* device name
"""
self.addr = addr
self.name = name
self.sock = None
"""connect:
Connect to BT addr
"""
def connect(self):
try:
port = 1
self.sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
self.sock.connect((self.addr, port))
return bluetooth.lookup_name(self.addr)
except Exception as e:
logging.exception(e)
return ''
"""disconnect:
Disconnect from BT addr
"""
def disconnect(self):
try:
self.sock.close()
except Exception as e:
logging.exception(e)
self.sock = None
"""send:
Send a command to host
"""
def send(self, cmd):
self.sock.send(cmd)
"""recieve:
Recieve a response from host
"""
def recieve(self):
self.sock.recieve(cmd)
### Replace xx:xx:xx:xx:xx:xx with your test device address
#test = RxBtComm('xx:xx:xx:xx:xx:xx', 'Test Device')
#test.connect()
#test.send('date')
#test.disconnect()
|
<commit_before><commit_msg>Create BT Comm wrapper class<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<programmingrobotsstudygroup@gmail.com>
# See LICENSE for details.
import bluetooth
import logging
class RxBtComm(object):
"""BT communication wrapper:
Attributes:
        addr: A string representing the device address.
name: A string representing the device name.
"""
logging.basicConfig(level=logging.DEBUG)
def __init__(self, addr, name=None):
"""Return a RxBtComm object
param *addr* device address
param *name* device name
"""
self.addr = addr
self.name = name
self.sock = None
"""connect:
Connect to BT addr
"""
def connect(self):
try:
port = 1
self.sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
self.sock.connect((self.addr, port))
return bluetooth.lookup_name(self.addr)
except Exception as e:
logging.exception(e)
return ''
"""disconnect:
Disconnect from BT addr
"""
def disconnect(self):
try:
self.sock.close()
except Exception as e:
logging.exception(e)
self.sock = None
"""send:
Send a command to host
"""
def send(self, cmd):
self.sock.send(cmd)
"""recieve:
Recieve a response from host
"""
def recieve(self):
self.sock.recieve(cmd)
### Replace xx:xx:xx:xx:xx:xx with your test device address
#test = RxBtComm('xx:xx:xx:xx:xx:xx', 'Test Device')
#test.connect()
#test.send('date')
#test.disconnect()
|
|
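A usage sketch for the wrapper above, hedged since it assumes the file is importable as rxbtcomm and that a paired device is in range; try/finally guarantees the socket is closed even if a command raises:

from rxbtcomm import RxBtComm

comm = RxBtComm('xx:xx:xx:xx:xx:xx', 'Test Device')
if comm.connect():
    try:
        comm.send('date')
    finally:
        comm.disconnect()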
3de2e625af9047b64cc2718e6e79be0c428b6ae7
|
CodeFights/extractEachKth.py
|
CodeFights/extractEachKth.py
|
#!/usr/local/bin/python
# Code Fights Extract Each Kth Problem
def extractEachKth(inputArray, k):
return [e for i, e in enumerate(inputArray) if (i + 1) % k != 0]
def main():
tests = [
[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, [1, 2, 4, 5, 7, 8, 10]],
[[1, 1, 1, 1, 1], 1, []],
[[1, 2, 1, 2, 1, 2, 1, 2], 2, [1, 1, 1, 1]]
]
for t in tests:
res = extractEachKth(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: extractEachKth({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: extractEachKth({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights extract each kth problem
|
Solve Code Fights extract each kth problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights extract each kth problem
|
#!/usr/local/bin/python
# Code Fights Extract Each Kth Problem
def extractEachKth(inputArray, k):
return [e for i, e in enumerate(inputArray) if (i + 1) % k != 0]
def main():
tests = [
[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, [1, 2, 4, 5, 7, 8, 10]],
[[1, 1, 1, 1, 1], 1, []],
[[1, 2, 1, 2, 1, 2, 1, 2], 2, [1, 1, 1, 1]]
]
for t in tests:
res = extractEachKth(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: extractEachKth({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: extractEachKth({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights extract each kth problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Extract Each Kth Problem
def extractEachKth(inputArray, k):
return [e for i, e in enumerate(inputArray) if (i + 1) % k != 0]
def main():
tests = [
[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, [1, 2, 4, 5, 7, 8, 10]],
[[1, 1, 1, 1, 1], 1, []],
[[1, 2, 1, 2, 1, 2, 1, 2], 2, [1, 1, 1, 1]]
]
for t in tests:
res = extractEachKth(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: extractEachKth({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: extractEachKth({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights extract each kth problem#!/usr/local/bin/python
# Code Fights Extract Each Kth Problem
def extractEachKth(inputArray, k):
return [e for i, e in enumerate(inputArray) if (i + 1) % k != 0]
def main():
tests = [
[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, [1, 2, 4, 5, 7, 8, 10]],
[[1, 1, 1, 1, 1], 1, []],
[[1, 2, 1, 2, 1, 2, 1, 2], 2, [1, 1, 1, 1]]
]
for t in tests:
res = extractEachKth(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: extractEachKth({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: extractEachKth({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights extract each kth problem<commit_after>#!/usr/local/bin/python
# Code Fights Extract Each Kth Problem
def extractEachKth(inputArray, k):
return [e for i, e in enumerate(inputArray) if (i + 1) % k != 0]
def main():
tests = [
[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3, [1, 2, 4, 5, 7, 8, 10]],
[[1, 1, 1, 1, 1], 1, []],
[[1, 2, 1, 2, 1, 2, 1, 2], 2, [1, 1, 1, 1]]
]
for t in tests:
res = extractEachKth(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: extractEachKth({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: extractEachKth({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
|
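The enumerate/modulo comprehension above keeps every element whose 1-based index is not a multiple of k. An equivalent formulation deletes the k-th elements with a single extended slice:

def extract_each_kth_slicing(input_array, k):
    # copy first, then drop indices k-1, 2k-1, ... in one slice delete
    result = list(input_array)
    del result[k - 1::k]
    return result

assert extract_each_kth_slicing([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3) == [1, 2, 4, 5, 7, 8, 10]
assert extract_each_kth_slicing([1, 1, 1, 1, 1], 1) == []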
60b5948508a67cb213ca04b5faacb77e27d8f84c
|
samples/forms.py
|
samples/forms.py
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
exclude = ['admission_note', ]
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
fields = ['was_applied', 'date_applied', ]
|
Add fields explicitly declared in form
|
:art: Add fields explicitly declared in form
|
Python
|
mit
|
gems-uff/labsys,gems-uff/labsys,gems-uff/labsys,gcrsaldanha/fiocruz,gcrsaldanha/fiocruz
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
exclude = ['admission_note', ]
:art: Add fields explicitly declared in form
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
fields = ['was_applied', 'date_applied', ]
|
<commit_before>import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
exclude = ['admission_note', ]
<commit_msg>:art: Add fields explicitly declared in form<commit_after>
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
fields = ['was_applied', 'date_applied', ]
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
exclude = ['admission_note', ]
:art: Add fields explicitly declared in formimport datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
fields = ['was_applied', 'date_applied', ]
|
<commit_before>import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
exclude = ['admission_note', ]
<commit_msg>:art: Add fields expicitly declared in form<commit_after>import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
fields = ['was_applied', 'date_applied', ]
|
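The change above swaps a blacklist (exclude) for an explicit whitelist (fields), so a field added to the model later cannot leak into the form unnoticed. A minimal sketch of the pattern; the model name is hypothetical and a configured Django project is assumed:

from django import forms

class VaccineForm(forms.ModelForm):
    class Meta:
        model = Vaccine  # hypothetical model with more fields than the form exposes
        # whitelist: only these two fields are rendered and validated
        fields = ['was_applied', 'date_applied']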
877a7b7449a1d88c14633376a2dfaca8c619c26a
|
exercises/chapter_03/exercise_03_06/exercise_03_06.py
|
exercises/chapter_03/exercise_03_06/exercise_03_06.py
|
# 3-6 Guest List
guest_list = ["Albert Einstein", "Isac Newton", "Marie Curie", "Galileo Galilei"]
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
cancelation_message = guest_list[1] + " can not attend the dinner."
print(cancelation_message)
guest_list[1] = "Charles Darwin"
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "I have a bigger table now so three more people will be invited."
print(message)
guest_list.insert(0, "Stephen Hawking")
guest_list.insert(2, "Louis Pasteur")
guest_list.append("Nicolaus Copernicus")
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[4] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[5] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[6] + " you are invited to dinner at 7 on saturday."
print(message)
|
Add solution to exercise 3.6.
|
Add solution to exercise 3.6.
|
Python
|
mit
|
HenrikSamuelsson/python-crash-course
|
Add solution to exercise 3.6.
|
# 3-6 Guest List
guest_list = ["Albert Einstein", "Isac Newton", "Marie Curie", "Galileo Galilei"]
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
cancelation_message = guest_list[1] + " can not attend the dinner."
print(cancelation_message)
guest_list[1] = "Charles Darwin"
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "I have a bigger table now so three more people will be invited."
print(message)
guest_list.insert(0, "Stephen Hawking")
guest_list.insert(2, "Louis Pasteur")
guest_list.append("Nicolaus Copernicus")
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[4] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[5] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[6] + " you are invited to dinner at 7 on saturday."
print(message)
|
<commit_before><commit_msg>Add solution to exercise 3.6.<commit_after>
|
# 3-6 Guest List
guest_list = ["Albert Einstein", "Isac Newton", "Marie Curie", "Galileo Galilei"]
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
cancelation_message = guest_list[1] + " can not attend the dinner."
print(cancelation_message)
guest_list[1] = "Charles Darwin"
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "I have a bigger table now so three more people will be invited."
print(message)
guest_list.insert(0, "Stephen Hawking")
guest_list.insert(2, "Louis Pasteur")
guest_list.append("Nicolaus Copernicus")
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[4] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[5] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[6] + " you are invited to dinner at 7 on saturday."
print(message)
|
Add solution to exercise 3.6.# 3-6 Guest List
guest_list = ["Albert Einstein", "Isac Newton", "Marie Curie", "Galileo Galilei"]
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
cancelation_message = guest_list[1] + " can not attend the dinner."
print(cancelation_message)
guest_list[1] = "Charles Darwin"
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "I have a bigger table now so three more people will be invited."
print(message)
guest_list.insert(0, "Stephen Hawking")
guest_list.insert(2, "Louis Pasteur")
guest_list.append("Nicolaus Copernicus")
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[4] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[5] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[6] + " you are invited to dinner at 7 on saturday."
print(message)
|
<commit_before><commit_msg>Add solution to exercise 3.6.<commit_after># 3-6 Guest List
guest_list = ["Albert Einstein", "Isac Newton", "Marie Curie", "Galileo Galilei"]
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
cancelation_message = guest_list[1] + " can not attend the dinner."
print(cancelation_message)
guest_list[1] = "Charles Darwin"
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "I have a bigger table now so three more people will be invited."
print(message)
guest_list.insert(0, "Stephen Hawking")
guest_list.insert(2, "Louis Pasteur")
guest_list.append("Nicolaus Copernicus")
message = "Hi " + guest_list[0] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[1] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[2] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[3] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[4] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[5] + " you are invited to dinner at 7 on saturday."
print(message)
message = "Hi " + guest_list[6] + " you are invited to dinner at 7 on saturday."
print(message)
|
|
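The solution above repeats the same message/print pair per guest; a loop expresses it once and scales with the list. A sketch:

def send_invitations(guests):
    # one loop replaces the per-guest message lines
    for guest in guests:
        print("Hi " + guest + " you are invited to dinner at 7 on saturday.")

send_invitations(["Stephen Hawking", "Albert Einstein", "Louis Pasteur",
                  "Charles Darwin", "Marie Curie", "Galileo Galilei",
                  "Nicolaus Copernicus"])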
a8db8c0448d98e2de0e662581542bd644e673c7c
|
geotrek/core/migrations/0018_remove_other_objects_from_factories.py
|
geotrek/core/migrations/0018_remove_other_objects_from_factories.py
|
# Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_objects_factories(apps, schema_editor):
ComfortModel = apps.get_model('core', 'Comfort')
PathSourceModel = apps.get_model('core', 'PathSource')
StakeModel = apps.get_model('core', 'Stake')
ComfortModel.objects.filter(paths__isnull=True, comfort__icontains="Comfort ").delete()
    PathSourceModel.objects.filter(paths__isnull=True, source__icontains="PathSource ").delete()
    StakeModel.objects.filter(paths__isnull=True, stake__icontains="Stake ").delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0017_remove_path_from_factories'),
]
operations = [
migrations.RunPython(remove_generated_objects_factories)
]
|
Add migration removing generated objects with factories
|
Add migration removing generated objects with factories
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin
|
Add migration removing generated objects with factories
|
# Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_objects_factories(apps, schema_editor):
ComfortModel = apps.get_model('core', 'Comfort')
PathSourceModel = apps.get_model('core', 'PathSource')
StakeModel = apps.get_model('core', 'Stake')
ComfortModel.objects.filter(paths__isnull=True, comfort__icontains="Comfort ").delete()
    PathSourceModel.objects.filter(paths__isnull=True, source__icontains="PathSource ").delete()
    StakeModel.objects.filter(paths__isnull=True, stake__icontains="Stake ").delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0017_remove_path_from_factories'),
]
operations = [
migrations.RunPython(remove_generated_objects_factories)
]
|
<commit_before><commit_msg>Add migration removing generated objects with factories<commit_after>
|
# Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_objects_factories(apps, schema_editor):
ComfortModel = apps.get_model('core', 'Comfort')
PathSourceModel = apps.get_model('core', 'PathSource')
StakeModel = apps.get_model('core', 'Stake')
ComfortModel.objects.filter(paths__isnull=True, comfort__icontains="Comfort ").delete()
    PathSourceModel.objects.filter(paths__isnull=True, source__icontains="PathSource ").delete()
    StakeModel.objects.filter(paths__isnull=True, stake__icontains="Stake ").delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0017_remove_path_from_factories'),
]
operations = [
migrations.RunPython(remove_generated_objects_factories)
]
|
Add migration removing generated objects with factories# Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_objects_factories(apps, schema_editor):
ComfortModel = apps.get_model('core', 'Comfort')
PathSourceModel = apps.get_model('core', 'PathSource')
StakeModel = apps.get_model('core', 'Stake')
ComfortModel.objects.filter(paths__isnull=True, comfort__icontains="Comfort ").delete()
    PathSourceModel.objects.filter(paths__isnull=True, source__icontains="PathSource ").delete()
    StakeModel.objects.filter(paths__isnull=True, stake__icontains="Stake ").delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0017_remove_path_from_factories'),
]
operations = [
migrations.RunPython(remove_generated_objects_factories)
]
|
<commit_before><commit_msg>Add migration removing generated objects with factories<commit_after># Generated by Django 2.0.13 on 2020-04-06 13:40
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.db import migrations
def remove_generated_objects_factories(apps, schema_editor):
ComfortModel = apps.get_model('core', 'Comfort')
PathSourceModel = apps.get_model('core', 'PathSource')
StakeModel = apps.get_model('core', 'Stake')
ComfortModel.objects.filter(paths__isnull=True, comfort__icontains="Comfort ").delete()
    PathSourceModel.objects.filter(paths__isnull=True, source__icontains="PathSource ").delete()
    StakeModel.objects.filter(paths__isnull=True, stake__icontains="Stake ").delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0017_remove_path_from_factories'),
]
operations = [
migrations.RunPython(remove_generated_objects_factories)
]
|
|
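The data migration above is irreversible as written; pairing RunPython with a no-op reverse keeps a backwards migrate from failing. A hedged sketch of that convention (Django also ships migrations.RunPython.noop for the same purpose):

from django.db import migrations

def backwards(apps, schema_editor):
    # nothing to restore: the deleted rows were unused factory leftovers
    pass

# pairing the record's function with the no-op reverse:
# migrations.RunPython(remove_generated_objects_factories, backwards)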
49c673c5c8374867fc9bf026717fe137bdba84bc
|
greengraph/test/test_graph.py
|
greengraph/test/test_graph.py
|
from greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
start = "London"
end = "Durham"
def test_Greengraph_init():
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
|
Add test file for graph.py and add test of Greengraph class constructor
|
Add test file for graph.py and add test of Greengraph class constructor
|
Python
|
mit
|
MikeVasmer/GreenGraphCoursework
|
Add test file for graph.py and add test of Greengraph class constructor
|
from greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
start = "London"
end = "Durham"
def test_Greengraph_init():
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
|
<commit_before><commit_msg>Add test file for graph.py and add test of Greengraph class constructor<commit_after>
|
from greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
start = "London"
end = "Durham"
def test_Greengraph_init():
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
|
Add test file for graph.py and add test of Greengraph class constructorfrom greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
start = "London"
end = "Durham"
def test_Greengraph_init():
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
|
<commit_before><commit_msg>Add test file for graph.py and add test of Greengraph class constructor<commit_after>from greengraph.map import Map
from greengraph.graph import Greengraph
from mock import patch
import geopy
from nose.tools import assert_equal
start = "London"
end = "Durham"
def test_Greengraph_init():
with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:
test_Greengraph = Greengraph(start,end)
#Test that GoogleV3 is called with the correct parameters
mock_GoogleV3.assert_called_with(domain="maps.google.co.uk")
#Test that the start and end fields are initialised correctly
assert_equal(test_Greengraph.start,start)
assert_equal(test_Greengraph.end,end)
|
|
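One hedged caveat on the test above: patch.object(geopy.geocoders, 'GoogleV3') only intercepts lookups that go through the geopy.geocoders module at call time. If graph.py binds the name directly (from geopy.geocoders import GoogleV3), the patch has to target the name where it is used. A sketch, valid only under that assumption:

from mock import patch
from greengraph.graph import Greengraph

with patch('greengraph.graph.GoogleV3') as mock_geocoder:
    graph = Greengraph('London', 'Durham')
    mock_geocoder.assert_called_with(domain='maps.google.co.uk')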
82e4c67bd7643eed06e7cd170ca1d0de41c70912
|
core/data/DataAnalyzer.py
|
core/data/DataAnalyzer.py
|
"""
DataAnalyzer
:Authors:
Berend Klein Haneveld
"""
class DataAnalyzer(object):
"""
DataAnalyzer
"""
def __init__(self):
super(DataAnalyzer, self).__init__()
@classmethod
def histogramForData(cls, data, nrBins):
"""
Samples the image data in order to create bins
for making a histogram of the data.
"""
dims = data.GetDimensions()
minVal, maxVal = data.GetScalarRange()
bins = [0 for x in range(nrBins)]
stepSize = 3
for z in range(0, dims[2], stepSize):
for y in range(0, dims[1], stepSize):
for x in range(0, dims[0], stepSize):
element = data.GetScalarComponentAsFloat(x, y, z, 0)
index = int(((element - minVal) / float(maxVal - minVal)) * (nrBins-1))
bins[index] += 1
return bins
|
Add a data analyzer class.
|
Add a data analyzer class.
Might come in handy to get statistical data of image datasets.
|
Python
|
mit
|
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
|
Add a data analyzer class.
Might come in handy to get statistical data of image datasets.
|
"""
DataAnalyzer
:Authors:
Berend Klein Haneveld
"""
class DataAnalyzer(object):
"""
DataAnalyzer
"""
def __init__(self):
super(DataAnalyzer, self).__init__()
@classmethod
def histogramForData(cls, data, nrBins):
"""
Samples the image data in order to create bins
for making a histogram of the data.
"""
dims = data.GetDimensions()
minVal, maxVal = data.GetScalarRange()
bins = [0 for x in range(nrBins)]
stepSize = 3
for z in range(0, dims[2], stepSize):
for y in range(0, dims[1], stepSize):
for x in range(0, dims[0], stepSize):
element = data.GetScalarComponentAsFloat(x, y, z, 0)
index = int(((element - minVal) / float(maxVal - minVal)) * (nrBins-1))
bins[index] += 1
return bins
|
<commit_before><commit_msg>Add a data analyzer class.
Might come in handy to get statistical data of image datasets.<commit_after>
|
"""
DataAnalyzer
:Authors:
Berend Klein Haneveld
"""
class DataAnalyzer(object):
"""
DataAnalyzer
"""
def __init__(self):
super(DataAnalyzer, self).__init__()
@classmethod
def histogramForData(cls, data, nrBins):
"""
Samples the image data in order to create bins
for making a histogram of the data.
"""
dims = data.GetDimensions()
minVal, maxVal = data.GetScalarRange()
bins = [0 for x in range(nrBins)]
stepSize = 3
for z in range(0, dims[2], stepSize):
for y in range(0, dims[1], stepSize):
for x in range(0, dims[0], stepSize):
element = data.GetScalarComponentAsFloat(x, y, z, 0)
index = int(((element - minVal) / float(maxVal - minVal)) * (nrBins-1))
bins[index] += 1
return bins
|
Add a data analyzer class.
Might come in handy to get statistical data of image datasets."""
DataAnalyzer
:Authors:
Berend Klein Haneveld
"""
class DataAnalyzer(object):
"""
DataAnalyzer
"""
def __init__(self):
super(DataAnalyzer, self).__init__()
@classmethod
def histogramForData(cls, data, nrBins):
"""
Samples the image data in order to create bins
for making a histogram of the data.
"""
dims = data.GetDimensions()
minVal, maxVal = data.GetScalarRange()
bins = [0 for x in range(nrBins)]
stepSize = 3
for z in range(0, dims[2], stepSize):
for y in range(0, dims[1], stepSize):
for x in range(0, dims[0], stepSize):
element = data.GetScalarComponentAsFloat(x, y, z, 0)
index = int(((element - minVal) / float(maxVal - minVal)) * (nrBins-1))
bins[index] += 1
return bins
|
<commit_before><commit_msg>Add a data analyzer class.
Might come in handy to get statistical data of image datasets.<commit_after>"""
DataAnalyzer
:Authors:
Berend Klein Haneveld
"""
class DataAnalyzer(object):
"""
DataAnalyzer
"""
def __init__(self):
super(DataAnalyzer, self).__init__()
@classmethod
def histogramForData(cls, data, nrBins):
"""
Samples the image data in order to create bins
for making a histogram of the data.
"""
dims = data.GetDimensions()
minVal, maxVal = data.GetScalarRange()
bins = [0 for x in range(nrBins)]
stepSize = 3
for z in range(0, dims[2], stepSize):
for y in range(0, dims[1], stepSize):
for x in range(0, dims[0], stepSize):
element = data.GetScalarComponentAsFloat(x, y, z, 0)
index = int(((element - minVal) / float(maxVal - minVal)) * (nrBins-1))
bins[index] += 1
return bins
|
|
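The triple loop above bins every third voxel by hand; over a flat array the same counting is one numpy call. A sketch that assumes the scalars have already been extracted from the vtkImageData:

import numpy as np

def histogram_for_values(values, nr_bins):
    # np.histogram spans [min, max] with nr_bins equal-width bins,
    # mirroring the record's manual index computation
    counts, _ = np.histogram(values, bins=nr_bins)
    return counts.tolist()

assert histogram_for_values([0.0, 0.25, 0.75, 1.0], 2) == [2, 2]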
3f85610873d88592970c64661e526b2a576e300f
|
sms_generator.py
|
sms_generator.py
|
def generate_new_procedure_message(procedure, ward, timeframe, doctor):
unique_reference = str(1)
message = str.format("{0} is available on {1}. Attend the ward in {2} and meet {3} in the junior doctors' office. "
"To accept this opportunity reply with {4}",
procedure,
ward,
timeframe,
doctor,
unique_reference)
print(message)
return message
def generate_success_response_message(procedure, ward, timeframe, doctor):
message = str.format("Please attend {0} in {1} and ask for {2} to complete this supervised "
"procedure. This learning opportunity has been reserved exclusively for you, please make "
"every effort to attend.",
ward,
timeframe,
doctor)
print(message)
return message
def generate_not_success_response_message():
message = str.format("Sorry - procedure already taken this time.")
print(message)
return message
|
Add new sms message generator
|
Add new sms message generator
|
Python
|
mit
|
bsharif/SLOT,nhshd-slot/SLOT,bsharif/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT,bsharif/SLOT
|
Add new sms message generator
|
def generate_new_procedure_message(procedure, ward, timeframe, doctor):
unique_reference = str(1)
message = str.format("{0} is available on {1}. Attend the ward in {2} and meet {3} in the junior doctors' office. "
"To accept this opportunity reply with {4}",
procedure,
ward,
timeframe,
doctor,
unique_reference)
print(message)
return message
def generate_success_response_message(procedure, ward, timeframe, doctor):
message = str.format("Please attend {0} in {1} and ask for {2} to complete this supervised "
"procedure. This learning opportunity has been reserved exclusively for you, please make "
"every effort to attend.",
ward,
timeframe,
doctor)
print(message)
return message
def generate_not_success_response_message():
message = str.format("Sorry - procedure already taken this time.")
print(message)
return message
|
<commit_before><commit_msg>Add new sms message generator<commit_after>
|
def generate_new_procedure_message(procedure, ward, timeframe, doctor):
unique_reference = str(1)
message = str.format("{0} is available on {1}. Attend the ward in {2} and meet {3} in the junior doctors' office. "
"To accept this opportunity reply with {4}",
procedure,
ward,
timeframe,
doctor,
unique_reference)
print(message)
return message
def generate_success_response_message(procedure, ward, timeframe, doctor):
message = str.format("Please attend {0} in {1} and ask for {2} to complete this supervised "
"procedure. This learning opportunity has been reserved exclusively for you, please make "
"every effort to attend.",
ward,
timeframe,
doctor)
print(message)
return message
def generate_not_success_response_message():
message = str.format("Sorry - procedure already taken this time.")
print(message)
return message
|
Add new sms message generatordef generate_new_procedure_message(procedure, ward, timeframe, doctor):
unique_reference = str(1)
message = str.format("{0} is available on {1}. Attend the ward in {2} and meet {3} in the junior doctors' office. "
"To accept this opportunity reply with {4}",
procedure,
ward,
timeframe,
doctor,
unique_reference)
print(message)
return message
def generate_success_response_message(procedure, ward, timeframe, doctor):
message = str.format("Please attend {0} in {1} and ask for {2} to complete this supervised "
"procedure. This learning opportunity has been reserved exclusively for you, please make "
"every effort to attend.",
ward,
timeframe,
doctor)
print(message)
return message
def generate_not_success_response_message():
message = str.format("Sorry - procedure already taken this time.")
print(message)
return message
|
<commit_before><commit_msg>Add new sms message generator<commit_after>def generate_new_procedure_message(procedure, ward, timeframe, doctor):
unique_reference = str(1)
message = str.format("{0} is available on {1}. Attend the ward in {2} and meet {3} in the junior doctors' office. "
"To accept this opportunity reply with {4}",
procedure,
ward,
timeframe,
doctor,
unique_reference)
print(message)
return message
def generate_success_response_message(procedure, ward, timeframe, doctor):
message = str.format("Please attend {0} in {1} and ask for {2} to complete this supervised "
"procedure. This learning opportunity has been reserved exclusively for you, please make "
"every effort to attend.",
ward,
timeframe,
doctor)
print(message)
return message
def generate_not_success_response_message():
message = str.format("Sorry - procedure already taken this time.")
print(message)
return message
|
|
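A readability note on the templates above: positional {0} through {4} placeholders are easy to misalign; named placeholders document the mapping at the call site. A short sketch with hypothetical sample values:

TEMPLATE = ("{procedure} is available on {ward}. Attend the ward in "
            "{timeframe} and meet {doctor} in the junior doctors' office.")

message = TEMPLATE.format(procedure="Cannulation", ward="Ward 9",
                          timeframe="30 minutes", doctor="Dr Smith")
print(message)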
8e8e11990e430302eca24d32ba0b88dcc66233d6
|
clburlison_scripts/connect2_wifi_pyobjc/connect2_wifi_pyobjc.py
|
clburlison_scripts/connect2_wifi_pyobjc/connect2_wifi_pyobjc.py
|
#!/usr/bin/python
"""
I didn't create this but I'm storing it so I can reuse it.
http://stackoverflow.com/a/34967364/4811765
"""
import objc
SSID = "MyWifiNetwork"
PASSWORD = "MyWifiPassword"
objc.loadBundle('CoreWLAN',
bundle_path='/System/Library/Frameworks/CoreWLAN.framework',
module_globals=globals())
iface = CWInterface.interface()
networks, err = iface.scanForNetworksWithName_err_(SSID, None)
network = networks.anyObject()
success, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)
|
Add connect2 wifi via pyobjc
|
Add connect2 wifi via pyobjc
|
Python
|
mit
|
clburlison/scripts,clburlison/scripts,clburlison/scripts
|
Add connect2 wifi via pyobjc
|
#!/usr/bin/python
"""
I didn't create this but I'm storing it so I can reuse it.
http://stackoverflow.com/a/34967364/4811765
"""
import objc
SSID = "MyWifiNetwork"
PASSWORD = "MyWifiPassword"
objc.loadBundle('CoreWLAN',
bundle_path='/System/Library/Frameworks/CoreWLAN.framework',
module_globals=globals())
iface = CWInterface.interface()
networks, err = iface.scanForNetworksWithName_err_(SSID, None)
network = networks.anyObject()
success, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)
|
<commit_before><commit_msg>Add connect2 wifi via pyobjc<commit_after>
|
#!/usr/bin/python
"""
I didn't create this but I'm storing it so I can reuse it.
http://stackoverflow.com/a/34967364/4811765
"""
import objc
SSID = "MyWifiNetwork"
PASSWORD = "MyWifiPassword"
objc.loadBundle('CoreWLAN',
bundle_path='/System/Library/Frameworks/CoreWLAN.framework',
module_globals=globals())
iface = CWInterface.interface()
networks, err = iface.scanForNetworksWithName_err_(SSID, None)
network = networks.anyObject()
success, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)
|
Add connect2 wifi via pyobjc#!/usr/bin/python
"""
I didn't create this but I'm storing it so I can reuse it.
http://stackoverflow.com/a/34967364/4811765
"""
import objc
SSID = "MyWifiNetwork"
PASSWORD = "MyWifiPassword"
objc.loadBundle('CoreWLAN',
bundle_path='/System/Library/Frameworks/CoreWLAN.framework',
module_globals=globals())
iface = CWInterface.interface()
networks, err = iface.scanForNetworksWithName_err_(SSID, None)
network = networks.anyObject()
success, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)
|
<commit_before><commit_msg>Add connect2 wifi via pyobjc<commit_after>#!/usr/bin/python
"""
I didn't create this but I'm storing it so I can reuse it.
http://stackoverflow.com/a/34967364/4811765
"""
import objc
SSID = "MyWifiNetwork"
PASSWORD = "MyWifiPassword"
objc.loadBundle('CoreWLAN',
bundle_path='/System/Library/Frameworks/CoreWLAN.framework',
module_globals=globals())
iface = CWInterface.interface()
networks, err = iface.scanForNetworksWithName_err_(SSID, None)
network = networks.anyObject()
success, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)
|
|
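A slightly more defensive variant of the same CoreWLAN sequence, sketched under the assumption that the script runs on macOS with the framework loaded as above; the error checks are my addition, and networks.count() assumes the usual NSSet interface exposed by PyObjC:

networks, err = iface.scanForNetworksWithName_err_(SSID, None)
if err is not None or networks is None or networks.count() == 0:
    raise RuntimeError("scan failed or SSID not found: %s" % err)
network = networks.anyObject()
success, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)
if not success:
    raise RuntimeError("association failed: %s" % err)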
a6d6b833e33dc465b0fa828018e2cbba748f8282
|
pygraphc/evaluation/EvaluationUtility.py
|
pygraphc/evaluation/EvaluationUtility.py
|
class EvaluationUtility(object):
@staticmethod
def convert_to_text(graph, clusters):
# convert clustering result from graph to text
new_clusters = {}
for cluster_id, nodes in clusters.iteritems():
for node in nodes:
members = graph.node[node]['member']
for member in members:
new_clusters.setdefault(cluster_id, []).append(member)
return new_clusters
|
Add utility class for evaluation
|
Add utility class for evaluation
|
Python
|
mit
|
studiawan/pygraphc
|
Add utility class for evaluation
|
class EvaluationUtility(object):
@staticmethod
def convert_to_text(graph, clusters):
# convert clustering result from graph to text
new_clusters = {}
for cluster_id, nodes in clusters.iteritems():
for node in nodes:
members = graph.node[node]['member']
for member in members:
new_clusters.setdefault(cluster_id, []).append(member)
return new_clusters
|
<commit_before><commit_msg>Add utility class for evaluation<commit_after>
|
class EvaluationUtility(object):
@staticmethod
def convert_to_text(graph, clusters):
# convert clustering result from graph to text
new_clusters = {}
for cluster_id, nodes in clusters.iteritems():
for node in nodes:
members = graph.node[node]['member']
for member in members:
new_clusters.setdefault(cluster_id, []).append(member)
return new_clusters
|
Add utility class for evaluation
class EvaluationUtility(object):
@staticmethod
def convert_to_text(graph, clusters):
# convert clustering result from graph to text
new_clusters = {}
for cluster_id, nodes in clusters.iteritems():
for node in nodes:
members = graph.node[node]['member']
for member in members:
new_clusters.setdefault(cluster_id, []).append(member)
return new_clusters
|
<commit_before><commit_msg>Add utility class for evaluation<commit_after>
class EvaluationUtility(object):
@staticmethod
def convert_to_text(graph, clusters):
# convert clustering result from graph to text
new_clusters = {}
for cluster_id, nodes in clusters.iteritems():
for node in nodes:
members = graph.node[node]['member']
for member in members:
new_clusters.setdefault(cluster_id, []).append(member)
return new_clusters
|
|
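A toy invocation of convert_to_text, sketched for Python 2 (the method uses iteritems) with networkx 1.x-style node attributes (graph.node[n]['member']) to match the access pattern above; the cluster ids and member strings are made up:

import networkx as nx

g = nx.Graph()
g.add_node(0, member=['log line 1', 'log line 2'])
g.add_node(1, member=['log line 3'])
clusters = {0: [0], 1: [1]}
print(EvaluationUtility.convert_to_text(g, clusters))
# -> {0: ['log line 1', 'log line 2'], 1: ['log line 3']}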
6a6abadc2395810076b89fb38c759f85426a0304
|
supportVectorMachine/howItWorksSupportVectorMachine.py
|
supportVectorMachine/howItWorksSupportVectorMachine.py
|
# -*- coding: utf-8 -*-
"""Support Vector Machine (SVM) classification for machine learning.
SVM is a binary classifier. The objective of the SVM is to find the best
separating hyperplane in vector space which is also referred to as the
decision boundary. And it decides what separating hyperplane is the 'best'
because the distance from it and the associating data it is separating is the
greatest at the plane in question.
This is the file where I create the algorithm from scratch.
dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
Example:
$ python howItWorksSupportVectorMachine.py
Todo:
* Sketch out the framework
"""
# minimize magnitude(w) and maximize b
# with constraint y_i*(x_i*w+b)>=1
# or Class*(KnownFeatures*w+b)>=1
|
Add framework for own SVM from scratch
|
Add framework for own SVM from scratch
|
Python
|
mit
|
a-holm/MachinelearningAlgorithms,a-holm/MachinelearningAlgorithms
|
Add framework for own SVM from scratch
|
# -*- coding: utf-8 -*-
"""Support Vector Machine (SVM) classification for machine learning.
SVM is a binary classifier. The objective of the SVM is to find the best
separating hyperplane in vector space which is also referred to as the
decision boundary. And it decides what separating hyperplane is the 'best'
because the distance from it and the associating data it is separating is the
greatest at the plane in question.
This is the file where I create the algorithm from scratch.
dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
Example:
$ python howItWorksSupportVectorMachine.py
Todo:
* Sketch out the framework
"""
# minimize magnitude(w) and maximize b
# with constraint y_i*(x_i*w+b)>=1
# or Class*(KnownFeatures*w+b)>=1
|
<commit_before><commit_msg>Add framework for own SVM from scratch<commit_after>
|
# -*- coding: utf-8 -*-
"""Support Vector Machine (SVM) classification for machine learning.
SVM is a binary classifier. The objective of the SVM is to find the best
separating hyperplane in vector space which is also referred to as the
decision boundary. And it decides what separating hyperplane is the 'best'
because the distance from it and the associating data it is separating is the
greatest at the plane in question.
This is the file where I create the algorithm from scratch.
dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
Example:
$ python howItWorksSupportVectorMachine.py
Todo:
* Sketch out the framework
"""
# minimize magnitude(w) and maximize b
# with constraint y_i*(x_i*w+b)>=1
# or Class*(KnownFeatures*w+b)>=1
|
Add framework for own SVM from scratch# -*- coding: utf-8 -*-
"""Support Vector Machine (SVM) classification for machine learning.
SVM is a binary classifier. The objective of the SVM is to find the best
separating hyperplane in vector space which is also referred to as the
decision boundary. And it decides what separating hyperplane is the 'best'
because the distance from it and the associating data it is separating is the
greatest at the plane in question.
This is the file where I create the algorithm from scratch.
dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
Example:
$ python howItWorksSupportVectorMachine.py
Todo:
* Sketch out the framework
"""
# minimize magnitude(w) and maximize b
# with constraint y_i*(x_i*w+b)>=1
# or Class*(KnownFeatures*w+b)>=1
|
<commit_before><commit_msg>Add framework for own SVM from scratch<commit_after># -*- coding: utf-8 -*-
"""Support Vector Machine (SVM) classification for machine learning.
SVM is a binary classifier. The objective of the SVM is to find the best
separating hyperplane in vector space which is also referred to as the
decision boundary. And it decides what separating hyperplane is the 'best'
because the distance from it and the associating data it is separating is the
greatest at the plane in question.
This is the file where I create the algorithm from scratch.
dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
Example:
$ python howItWorksSupportVectorMachine.py
Todo:
* Sketch out the framework
"""
# minimize magnitude(w) and maximize b
# with constraint y_i*(x_i*w+b)>=1
# or Class*(KnownFeatures*w+b)>=1
|
|
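The constraint in the closing comments, y_i*(x_i.w + b) >= 1, is easy to check numerically; a small NumPy sketch with made-up samples and a made-up candidate (w, b):

import numpy as np

X = np.array([[3.0, 4.0], [-2.0, -1.0]])  # two illustrative samples
y = np.array([1, -1])                     # their class labels
w, b = np.array([0.4, 0.4]), -0.5         # candidate hyperplane parameters
margins = y * (X.dot(w) + b)
print(margins, np.all(margins >= 1))      # feasible iff every margin >= 1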
92ec849fc18d7cb610839abe2213ce30ceced46b
|
InvenTree/InvenTree/ci_postgresql.py
|
InvenTree/InvenTree/ci_postgresql.py
|
"""
Configuration file for running tests against a PostgreSQL database.
"""
from InvenTree.settings import *
# Override the 'test' database
if 'test' in sys.argv:
    eprint('InvenTree: Running tests - Using PostgreSQL test database')
DATABASES['default'] = {
# Ensure postgresql backend is being used
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'inventree_test_db',
'USER': 'postgres',
'PASSWORD': '',
}
|
Add ci settings file for postgresql database
|
Add ci settings file for postgresql database
|
Python
|
mit
|
inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree
|
Add ci settings file for postgresql database
|
"""
Configuration file for running tests against a PostgreSQL database.
"""
from InvenTree.settings import *
# Override the 'test' database
if 'test' in sys.argv:
    eprint('InvenTree: Running tests - Using PostgreSQL test database')
DATABASES['default'] = {
# Ensure postgresql backend is being used
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'inventree_test_db',
'USER': 'postgres',
'PASSWORD': '',
}
|
<commit_before><commit_msg>Add ci settings file for postgresql database<commit_after>
|
"""
Configuration file for running tests against a PostgreSQL database.
"""
from InvenTree.settings import *
# Override the 'test' database
if 'test' in sys.argv:
    eprint('InvenTree: Running tests - Using PostgreSQL test database')
DATABASES['default'] = {
# Ensure postgresql backend is being used
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'inventree_test_db',
'USER': 'postgres',
'PASSWORD': '',
}
|
Add ci settings file for postgresql database"""
Configuration file for running tests against a PostgreSQL database.
"""
from InvenTree.settings import *
# Override the 'test' database
if 'test' in sys.argv:
    eprint('InvenTree: Running tests - Using PostgreSQL test database')
DATABASES['default'] = {
# Ensure postgresql backend is being used
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'inventree_test_db',
'USER': 'postgres',
'PASSWORD': '',
}
|
<commit_before><commit_msg>Add ci settings file for postgresql database<commit_after>"""
Configuration file for running tests against a PostgreSQL database.
"""
from InvenTree.settings import *
# Override the 'test' database
if 'test' in sys.argv:
    eprint('InvenTree: Running tests - Using PostgreSQL test database')
DATABASES['default'] = {
# Ensure postgresql backend is being used
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'inventree_test_db',
'USER': 'postgres',
'PASSWORD': '',
}
|
|
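One hypothetical way to exercise this settings module, assuming a standard Django project layout and a reachable PostgreSQL service (equivalent to running manage.py test --settings=InvenTree.ci_postgresql):

from django.core.management import execute_from_command_line

execute_from_command_line(
    ["manage.py", "test", "--settings=InvenTree.ci_postgresql"])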
c84ce4b2494771c48890c122420e4665828ac4f8
|
CodeFights/differentRightmostBit.py
|
CodeFights/differentRightmostBit.py
|
#!/usr/local/bin/python
# Code Different Right-most Bit (Core) Problem
def differentRightmostBit(n, m):
return (n ^ m) & -(n ^ m)
def main():
tests = [
[11, 13, 2],
[7, 23, 16],
[1, 0, 1],
[64, 65, 1],
[1073741823, 1071513599, 131072],
[42, 22, 4]
]
for t in tests:
res = differentRightmostBit(t[0], t[1])
if t[2] == res:
print("PASSED: differentRightmostBit({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: differentRightmostBit({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
|
Solve Code Fights different rightmost bit problem
|
Solve Code Fights different rightmost bit problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights different rightmost bit problem
|
#!/usr/local/bin/python
# Code Different Right-most Bit (Core) Problem
def differentRightmostBit(n, m):
return (n ^ m) & -(n ^ m)
def main():
tests = [
[11, 13, 2],
[7, 23, 16],
[1, 0, 1],
[64, 65, 1],
[1073741823, 1071513599, 131072],
[42, 22, 4]
]
for t in tests:
res = differentRightmostBit(t[0], t[1])
if t[2] == res:
print("PASSED: differentRightmostBit({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: differentRightmostBit({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights different rightmost bit problem<commit_after>
|
#!/usr/local/bin/python
# Code Different Right-most Bit (Core) Problem
def differentRightmostBit(n, m):
return (n ^ m) & -(n ^ m)
def main():
tests = [
[11, 13, 2],
[7, 23, 16],
[1, 0, 1],
[64, 65, 1],
[1073741823, 1071513599, 131072],
[42, 22, 4]
]
for t in tests:
res = differentRightmostBit(t[0], t[1])
if t[2] == res:
print("PASSED: differentRightmostBit({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: differentRightmostBit({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
|
Solve Code Fights different rightmost bit problem#!/usr/local/bin/python
# Code Different Right-most Bit (Core) Problem
def differentRightmostBit(n, m):
return (n ^ m) & -(n ^ m)
def main():
tests = [
[11, 13, 2],
[7, 23, 16],
[1, 0, 1],
[64, 65, 1],
[1073741823, 1071513599, 131072],
[42, 22, 4]
]
for t in tests:
res = differentRightmostBit(t[0], t[1])
if t[2] == res:
print("PASSED: differentRightmostBit({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: differentRightmostBit({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights different rightmost bit problem<commit_after>#!/usr/local/bin/python
# Code Different Right-most Bit (Core) Problem
def differentRightmostBit(n, m):
return (n ^ m) & -(n ^ m)
def main():
tests = [
[11, 13, 2],
[7, 23, 16],
[1, 0, 1],
[64, 65, 1],
[1073741823, 1071513599, 131072],
[42, 22, 4]
]
for t in tests:
res = differentRightmostBit(t[0], t[1])
if t[2] == res:
print("PASSED: differentRightmostBit({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print(("FAILED: differentRightmostBit({}, {}) returned {},"
"answer: {}").format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
|
|
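Why (n ^ m) & -(n ^ m) works: n ^ m sets exactly the bits where n and m differ, and x & -x isolates the least-significant set bit of x under two's complement. A worked trace of the first test case:

n, m = 11, 13       # 0b1011, 0b1101
x = n ^ m           # 0b0110 -> the differing bits
print(bin(x))       # 0b110
print(x & -x)       # 2 -> lowest differing bit, matching the expected answer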
3345dc2f1ac15f06d3e95b5ead894ee9d3a27d9e
|
piwrite.py
|
piwrite.py
|
#!/bin/env python
import argparse
import sys
import os
parser = argparse.ArgumentParser(description="Write multiple svgs from stdin to files")
parser.add_argument('-o', '--outfile', metavar='OUTFILE', default='output.svg')
args = parser.parse_args()
base, extension = os.path.splitext(args.outfile)
def write_files(collection):
for i,s in enumerate(collection):
f = open(base + "%06d" % i + extension, 'w')
f.write(s)
f.close()
write_files(sys.stdin.readlines())
|
Add file writer utility script
|
Add file writer utility script
|
Python
|
epl-1.0
|
rbuchmann/pivicl
|
Add file writer utility script
|
#!/bin/env python
import argparse
import sys
import os
parser = argparse.ArgumentParser(description="Write multiple svgs from stdin to files")
parser.add_argument('-o', '--outfile', metavar='OUTFILE', default='output.svg')
args = parser.parse_args()
base, extension = os.path.splitext(args.outfile)
def write_files(collection):
for i,s in enumerate(collection):
f = open(base + "%06d" % i + extension, 'w')
f.write(s)
f.close()
write_files(sys.stdin.readlines())
|
<commit_before><commit_msg>Add file writer utility script<commit_after>
|
#!/bin/env python
import argparse
import sys
import os
parser = argparse.ArgumentParser(description="Write multiple svgs from stdin to files")
parser.add_argument('-o', '--outfile', metavar='OUTFILE', default='output.svg')
args = parser.parse_args()
base, extension = os.path.splitext(args.outfile)
def write_files(collection):
for i,s in enumerate(collection):
f = open(base + "%06d" % i + extension, 'w')
f.write(s)
f.close()
write_files(sys.stdin.readlines())
|
Add file writer utility script#!/bin/env python
import argparse
import sys
import os
parser = argparse.ArgumentParser(description="Write multiple svgs from stdin to files")
parser.add_argument('-o', '--outfile', metavar='OUTFILE', default='output.svg')
args = parser.parse_args()
base, extension = os.path.splitext(args.outfile)
def write_files(collection):
for i,s in enumerate(collection):
f = open(base + "%06d" % i + extension, 'w')
f.write(s)
f.close()
write_files(sys.stdin.readlines())
|
<commit_before><commit_msg>Add file writer utility script<commit_after>#!/bin/env python
import argparse
import sys
import os
parser = argparse.ArgumentParser(description="Write multiple svgs from stdin to files")
parser.add_argument('-o', '--outfile', metavar='OUTFILE', default='output.svg')
args = parser.parse_args()
base, extension = os.path.splitext(args.outfile)
def write_files(collection):
for i,s in enumerate(collection):
f = open(base + "%06d" % i + extension, 'w')
f.write(s)
f.close()
write_files(sys.stdin.readlines())
|
|
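The script expects one SVG document per line on stdin; calling write_files directly behaves the same way while the module-level base/extension are in scope (output names below assume the default -o output.svg):

# Writes output000000.svg and output000001.svg in the current directory.
write_files(['<svg xmlns="http://www.w3.org/2000/svg"/>\n',
             '<svg xmlns="http://www.w3.org/2000/svg"/>\n'])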
a3df0567c295f0b2879c9a4f095a31108359d531
|
nodeconductor/billing/migrations/0003_invoice_status.py
|
nodeconductor/billing/migrations/0003_invoice_status.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('billing', '0002_pricelist'),
]
operations = [
migrations.AddField(
model_name='invoice',
name='status',
field=models.CharField(max_length=80, blank=True),
preserve_default=True,
),
]
|
Add missing migration for invoice status
|
Add missing migration for invoice status
- ITACLOUD-4886
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Add missing migration for invoice status
- ITACLOUD-4886
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('billing', '0002_pricelist'),
]
operations = [
migrations.AddField(
model_name='invoice',
name='status',
field=models.CharField(max_length=80, blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration for invoice status
- ITACLOUD-4886<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('billing', '0002_pricelist'),
]
operations = [
migrations.AddField(
model_name='invoice',
name='status',
field=models.CharField(max_length=80, blank=True),
preserve_default=True,
),
]
|
Add missing migration for invoice status
- ITACLOUD-4886# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('billing', '0002_pricelist'),
]
operations = [
migrations.AddField(
model_name='invoice',
name='status',
field=models.CharField(max_length=80, blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration for invoice status
- ITACLOUD-4886<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('billing', '0002_pricelist'),
]
operations = [
migrations.AddField(
model_name='invoice',
name='status',
field=models.CharField(max_length=80, blank=True),
preserve_default=True,
),
]
|
|
7b40a4902d1dc43c73a7858fc9286a641b3a9666
|
assess_isoform_quantification/options.py
|
assess_isoform_quantification/options.py
|
from schema import Schema
def validate_file_option(file_option, msg):
msg = "{msg} '{file}'.".format(msg=msg, file=file_option)
return Schema(open, error=msg).validate(file_option)
|
Add validation function removed from main script.
|
Add validation function removed from main script.
|
Python
|
mit
|
COMBINE-lab/piquant,lweasel/piquant,lweasel/piquant
|
Add validation function removed from main script.
|
from schema import Schema
def validate_file_option(file_option, msg):
msg = "{msg} '{file}'.".format(msg=msg, file=file_option)
return Schema(open, error=msg).validate(file_option)
|
<commit_before><commit_msg>Add validation function removed from main script.<commit_after>
|
from schema import Schema
def validate_file_option(file_option, msg):
msg = "{msg} '{file}'.".format(msg=msg, file=file_option)
return Schema(open, error=msg).validate(file_option)
|
Add validation function removed from main script.from schema import Schema
def validate_file_option(file_option, msg):
msg = "{msg} '{file}'.".format(msg=msg, file=file_option)
return Schema(open, error=msg).validate(file_option)
|
<commit_before><commit_msg>Add validation function removed from main script.<commit_after>from schema import Schema
def validate_file_option(file_option, msg):
msg = "{msg} '{file}'.".format(msg=msg, file=file_option)
return Schema(open, error=msg).validate(file_option)
|
|
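Schema(open, error=msg) uses the built-in open as the validator, so validation succeeds only when the path can be opened; a sketch of the failure path (SchemaError is raised by the same schema package):

from schema import SchemaError

try:
    validate_file_option("/no/such/file.txt", "Could not open input file")
except SchemaError as e:
    print(e)  # Could not open input file '/no/such/file.txt'.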
e304aae71617cdba0ffcb720a24406375fb866a1
|
Sketches/MH/audio/ToWAV.py
|
Sketches/MH/audio/ToWAV.py
|
from Axon.Component import component
import string
import struct
from Axon.Ipc import producerFinished, shutdown
class PCMToWave(component):
def __init__(self, bytespersample, samplingfrequency):
super(PCMToWave, self).__init__()
self.bytespersample = bytespersample
self.samplingfrequency = samplingfrequency
if self.bytespersample not in [2,4]:
print "Currently bytespersample must be 2 or 4"
raise ValueError
bytestofunction = { 2: self.sample2Byte, 4: self.sample4Byte }
self.pack = bytestofunction[self.bytespersample]
def sample2Byte(self, value):
return struct.pack("<h", int(value * 32768.0))
def sample4Byte(self, value):
return struct.pack("<l", int(value * 2147483648.0))
def main(self):
#we don't know the length yet, so we say the file lasts an arbitrary (long) time
riffchunk = "RIFF" + struct.pack("<L", 0xEFFFFFFF) + "WAVE"
bytespersecond = self.bytespersample * self.samplingfrequency
formatchunk = "fmt "
formatchunk += struct.pack("<L", 0x10) #16 for PCM
formatchunk += struct.pack("<H", 0x01) #PCM/Linear quantization
formatchunk += struct.pack("<H", 0x01) #mono
formatchunk += struct.pack("<L", self.samplingfrequency)
formatchunk += struct.pack("<L", bytespersecond)
formatchunk += struct.pack("<H", self.bytespersample)
formatchunk += struct.pack("<H", self.bytespersample * 8)
self.send(riffchunk, "outbox")
self.send(formatchunk, "outbox")
datachunkheader = "data" + struct.pack("<L", 0xEFFFFFFF) #again, an arbitrary (large) value
self.send(datachunkheader, "outbox")
running = True
while running:
yield 1
codedsamples = []
while self.dataReady("inbox"): # we accept lists of floats
samplelist = self.recv("inbox")
for sample in samplelist:
if sample < -1:
sample = -1
elif sample > 1:
sample = 1
codedsamples.append(self.pack(sample))
del samplelist
if codedsamples:
self.send(string.join(codedsamples, ""), "outbox")
while self.dataReady("control"): # we accept lists of floats
msg = self.recv("control")
if isinstance(msg, producerFinished) or isinstance(msg, shutdown):
return
self.pause()
|
Copy of Ryan's PCMToWave component.
|
Copy of Ryan's PCMToWave component.
Matt
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
Copy of Ryan's PCMToWave component.
Matt
|
from Axon.Component import component
import string
import struct
from Axon.Ipc import producerFinished, shutdown
class PCMToWave(component):
def __init__(self, bytespersample, samplingfrequency):
super(PCMToWave, self).__init__()
self.bytespersample = bytespersample
self.samplingfrequency = samplingfrequency
if self.bytespersample not in [2,4]:
print "Currently bytespersample must be 2 or 4"
raise ValueError
bytestofunction = { 2: self.sample2Byte, 4: self.sample4Byte }
self.pack = bytestofunction[self.bytespersample]
def sample2Byte(self, value):
return struct.pack("<h", int(value * 32768.0))
def sample4Byte(self, value):
return struct.pack("<l", int(value * 2147483648.0))
def main(self):
#we don't know the length yet, so we say the file lasts an arbitrary (long) time
riffchunk = "RIFF" + struct.pack("<L", 0xEFFFFFFF) + "WAVE"
bytespersecond = self.bytespersample * self.samplingfrequency
formatchunk = "fmt "
formatchunk += struct.pack("<L", 0x10) #16 for PCM
formatchunk += struct.pack("<H", 0x01) #PCM/Linear quantization
formatchunk += struct.pack("<H", 0x01) #mono
formatchunk += struct.pack("<L", self.samplingfrequency)
formatchunk += struct.pack("<L", bytespersecond)
formatchunk += struct.pack("<H", self.bytespersample)
formatchunk += struct.pack("<H", self.bytespersample * 8)
self.send(riffchunk, "outbox")
self.send(formatchunk, "outbox")
datachunkheader = "data" + struct.pack("<L", 0xEFFFFFFF) #again, an arbitrary (large) value
self.send(datachunkheader, "outbox")
running = True
while running:
yield 1
codedsamples = []
while self.dataReady("inbox"): # we accept lists of floats
samplelist = self.recv("inbox")
for sample in samplelist:
if sample < -1:
sample = -1
elif sample > 1:
sample = 1
codedsamples.append(self.pack(sample))
del samplelist
if codedsamples:
self.send(string.join(codedsamples, ""), "outbox")
while self.dataReady("control"): # we accept lists of floats
msg = self.recv("control")
if isinstance(msg, producerFinished) or isinstance(msg, shutdown):
return
self.pause()
|
<commit_before><commit_msg>Copy of Ryan's PCMToWave component.
Matt<commit_after>
|
from Axon.Component import component
import string
import struct
from Axon.Ipc import producerFinished, shutdown
class PCMToWave(component):
def __init__(self, bytespersample, samplingfrequency):
super(PCMToWave, self).__init__()
self.bytespersample = bytespersample
self.samplingfrequency = samplingfrequency
if self.bytespersample not in [2,4]:
print "Currently bytespersample must be 2 or 4"
raise ValueError
bytestofunction = { 2: self.sample2Byte, 4: self.sample4Byte }
self.pack = bytestofunction[self.bytespersample]
def sample2Byte(self, value):
return struct.pack("<h", int(value * 32768.0))
def sample4Byte(self, value):
return struct.pack("<l", int(value * 2147483648.0))
def main(self):
#we don't know the length yet, so we say the file lasts an arbitrary (long) time
riffchunk = "RIFF" + struct.pack("<L", 0xEFFFFFFF) + "WAVE"
bytespersecond = self.bytespersample * self.samplingfrequency
formatchunk = "fmt "
formatchunk += struct.pack("<L", 0x10) #16 for PCM
formatchunk += struct.pack("<H", 0x01) #PCM/Linear quantization
formatchunk += struct.pack("<H", 0x01) #mono
formatchunk += struct.pack("<L", self.samplingfrequency)
formatchunk += struct.pack("<L", bytespersecond)
formatchunk += struct.pack("<H", self.bytespersample)
formatchunk += struct.pack("<H", self.bytespersample * 8)
self.send(riffchunk, "outbox")
self.send(formatchunk, "outbox")
datachunkheader = "data" + struct.pack("<L", 0xEFFFFFFF) #again, an arbitrary (large) value
self.send(datachunkheader, "outbox")
running = True
while running:
yield 1
codedsamples = []
while self.dataReady("inbox"): # we accept lists of floats
samplelist = self.recv("inbox")
for sample in samplelist:
if sample < -1:
sample = -1
elif sample > 1:
sample = 1
codedsamples.append(self.pack(sample))
del samplelist
if codedsamples:
self.send(string.join(codedsamples, ""), "outbox")
while self.dataReady("control"): # we accept lists of floats
msg = self.recv("control")
if isinstance(msg, producerFinished) or isinstance(msg, shutdown):
return
self.pause()
|
Copy of Ryan's PCMToWave component.
Mattfrom Axon.Component import component
import string
import struct
from Axon.Ipc import producerFinished, shutdown
class PCMToWave(component):
def __init__(self, bytespersample, samplingfrequency):
super(PCMToWave, self).__init__()
self.bytespersample = bytespersample
self.samplingfrequency = samplingfrequency
if self.bytespersample not in [2,4]:
print "Currently bytespersample must be 2 or 4"
raise ValueError
bytestofunction = { 2: self.sample2Byte, 4: self.sample4Byte }
self.pack = bytestofunction[self.bytespersample]
def sample2Byte(self, value):
return struct.pack("<h", int(value * 32768.0))
def sample4Byte(self, value):
return struct.pack("<l", int(value * 2147483648.0))
def main(self):
#we don't know the length yet, so we say the file lasts an arbitrary (long) time
riffchunk = "RIFF" + struct.pack("<L", 0xEFFFFFFF) + "WAVE"
bytespersecond = self.bytespersample * self.samplingfrequency
formatchunk = "fmt "
formatchunk += struct.pack("<L", 0x10) #16 for PCM
formatchunk += struct.pack("<H", 0x01) #PCM/Linear quantization
formatchunk += struct.pack("<H", 0x01) #mono
formatchunk += struct.pack("<L", self.samplingfrequency)
formatchunk += struct.pack("<L", bytespersecond)
formatchunk += struct.pack("<H", self.bytespersample)
formatchunk += struct.pack("<H", self.bytespersample * 8)
self.send(riffchunk, "outbox")
self.send(formatchunk, "outbox")
datachunkheader = "data" + struct.pack("<L", 0xEFFFFFFF) #again, an arbitrary (large) value
self.send(datachunkheader, "outbox")
running = True
while running:
yield 1
codedsamples = []
while self.dataReady("inbox"): # we accept lists of floats
samplelist = self.recv("inbox")
for sample in samplelist:
if sample < -1:
sample = -1
elif sample > 1:
sample = 1
codedsamples.append(self.pack(sample))
del samplelist
if codedsamples:
self.send(string.join(codedsamples, ""), "outbox")
while self.dataReady("control"): # we accept lists of floats
msg = self.recv("control")
if isinstance(msg, producerFinished) or isinstance(msg, shutdown):
return
self.pause()
|
<commit_before><commit_msg>Copy of Ryan's PCMToWave component.
Matt<commit_after>from Axon.Component import component
import string
import struct
from Axon.Ipc import producerFinished, shutdown
class PCMToWave(component):
def __init__(self, bytespersample, samplingfrequency):
super(PCMToWave, self).__init__()
self.bytespersample = bytespersample
self.samplingfrequency = samplingfrequency
if self.bytespersample not in [2,4]:
print "Currently bytespersample must be 2 or 4"
raise ValueError
bytestofunction = { 2: self.sample2Byte, 4: self.sample4Byte }
self.pack = bytestofunction[self.bytespersample]
def sample2Byte(self, value):
return struct.pack("<h", int(value * 32768.0))
def sample4Byte(self, value):
return struct.pack("<l", int(value * 2147483648.0))
def main(self):
#we don't know the length yet, so we say the file lasts an arbitrary (long) time
riffchunk = "RIFF" + struct.pack("<L", 0xEFFFFFFF) + "WAVE"
bytespersecond = self.bytespersample * self.samplingfrequency
formatchunk = "fmt "
formatchunk += struct.pack("<L", 0x10) #16 for PCM
formatchunk += struct.pack("<H", 0x01) #PCM/Linear quantization
formatchunk += struct.pack("<H", 0x01) #mono
formatchunk += struct.pack("<L", self.samplingfrequency)
formatchunk += struct.pack("<L", bytespersecond)
formatchunk += struct.pack("<H", self.bytespersample)
formatchunk += struct.pack("<H", self.bytespersample * 8)
self.send(riffchunk, "outbox")
self.send(formatchunk, "outbox")
datachunkheader = "data" + struct.pack("<L", 0xEFFFFFFF) #again, an arbitrary (large) value
self.send(datachunkheader, "outbox")
running = True
while running:
yield 1
codedsamples = []
while self.dataReady("inbox"): # we accept lists of floats
samplelist = self.recv("inbox")
for sample in samplelist:
if sample < -1:
sample = -1
elif sample > 1:
sample = 1
codedsamples.append(self.pack(sample))
del samplelist
if codedsamples:
self.send(string.join(codedsamples, ""), "outbox")
while self.dataReady("control"): # we accept lists of floats
msg = self.recv("control")
if isinstance(msg, producerFinished) or isinstance(msg, shutdown):
return
self.pause()
|
|
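The packing helpers map floats in [-1, 1] to signed integers, but the 16-bit path overflows at exactly 1.0, since int(1.0 * 32768.0) = 32768 is one past the signed-short maximum; a small sketch (the clamp is my addition, not part of the commit):

import struct

print(struct.pack("<h", int(0.5 * 32768.0)))              # 16384 packs fine
# struct.pack("<h", int(1.0 * 32768.0))                   # struct.error: 32768 > 32767
print(struct.pack("<h", min(int(1.0 * 32768.0), 32767)))  # clamped alternative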
16275938769c16c79b89349612e8e7b2891de815
|
kolibri/auth/migrations/0008_auto_20180222_1244.py
|
kolibri/auth/migrations/0008_auto_20180222_1244.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-22 20:44
from __future__ import unicode_literals
import kolibri.auth.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0007_auto_20171226_1125'),
]
operations = [
migrations.AlterModelManagers(
name='facilityuser',
managers=[
('objects', kolibri.auth.models.FacilityUserModelManager()),
],
),
]
|
Add migration for user manager
|
Add migration for user manager
|
Python
|
mit
|
benjaoming/kolibri,jonboiser/kolibri,christianmemije/kolibri,lyw07/kolibri,mrpau/kolibri,mrpau/kolibri,DXCanas/kolibri,mrpau/kolibri,benjaoming/kolibri,lyw07/kolibri,mrpau/kolibri,christianmemije/kolibri,learningequality/kolibri,jonboiser/kolibri,learningequality/kolibri,lyw07/kolibri,indirectlylit/kolibri,DXCanas/kolibri,lyw07/kolibri,christianmemije/kolibri,jonboiser/kolibri,benjaoming/kolibri,DXCanas/kolibri,christianmemije/kolibri,learningequality/kolibri,learningequality/kolibri,jonboiser/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,DXCanas/kolibri,benjaoming/kolibri
|
Add migration for user manager
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-22 20:44
from __future__ import unicode_literals
import kolibri.auth.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0007_auto_20171226_1125'),
]
operations = [
migrations.AlterModelManagers(
name='facilityuser',
managers=[
('objects', kolibri.auth.models.FacilityUserModelManager()),
],
),
]
|
<commit_before><commit_msg>Add migration for user manager<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-22 20:44
from __future__ import unicode_literals
import kolibri.auth.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0007_auto_20171226_1125'),
]
operations = [
migrations.AlterModelManagers(
name='facilityuser',
managers=[
('objects', kolibri.auth.models.FacilityUserModelManager()),
],
),
]
|
Add migration for user manager# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-22 20:44
from __future__ import unicode_literals
import kolibri.auth.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0007_auto_20171226_1125'),
]
operations = [
migrations.AlterModelManagers(
name='facilityuser',
managers=[
('objects', kolibri.auth.models.FacilityUserModelManager()),
],
),
]
|
<commit_before><commit_msg>Add migration for user manager<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-22 20:44
from __future__ import unicode_literals
import kolibri.auth.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0007_auto_20171226_1125'),
]
operations = [
migrations.AlterModelManagers(
name='facilityuser',
managers=[
('objects', kolibri.auth.models.FacilityUserModelManager()),
],
),
]
|
|
883aac8a282d4525e82d3eb151ea293c5577424c
|
core/migrations/0002_auto_20141008_0853.py
|
core/migrations/0002_auto_20141008_0853.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.create(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.create(user=user,
documento='00000000A')
def delete_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.get(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.get(user=user).delete()
user.delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RunPython(create_extra_users, delete_extra_users),
]
|
Add data migration to create gesinv
|
Add data migration to create gesinv
|
Python
|
agpl-3.0
|
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
|
Add data migration to create gesinv
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.create(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.create(user=user,
documento='00000000A')
def delete_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.get(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.get(user=user).delete()
user.delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RunPython(create_extra_users, delete_extra_users),
]
|
<commit_before><commit_msg>Add data migration to create gesinv<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.create(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.create(user=user,
documento='00000000A')
def delete_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.get(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.get(user=user).delete()
user.delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RunPython(create_extra_users, delete_extra_users),
]
|
Add data migration to create gesinv# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.create(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.create(user=user,
documento='00000000A')
def delete_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.get(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.get(user=user).delete()
user.delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RunPython(create_extra_users, delete_extra_users),
]
|
<commit_before><commit_msg>Add data migration to create gesinv<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.create(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.create(user=user,
documento='00000000A')
def delete_extra_users(apps, schema_editor):
user = apps.get_model("auth.User").objects.get(username='GesInv-ULL')
apps.get_model("core", "UserProfile").objects.get(user=user).delete()
user.delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RunPython(create_extra_users, delete_extra_users),
]
|
|
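For comparison, a data migration that only needs to run forward can pass the built-in noop as its reverse function (a sketch; the model and seed value are hypothetical):

from django.db import migrations

def forward(apps, schema_editor):
    Thing = apps.get_model("core", "Thing")  # hypothetical model
    Thing.objects.create(name="seed")

class Migration(migrations.Migration):
    dependencies = [("core", "0002_auto_20141008_0853")]
    operations = [migrations.RunPython(forward, migrations.RunPython.noop)]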
e6181c5d7c95af23ee6d51d125642104782f5cf1
|
Python/136_SingleNumber.py
|
Python/136_SingleNumber.py
|
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
#Using XOR to find the single number.
#Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is commutative, so the order of elements does not matter.
#Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
res = 0
for num in nums:
res ^= num
return res
nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
|
Add solution for 136_Single Number with XOR operation.
|
Add solution for 136_Single Number with XOR operation.
|
Python
|
mit
|
comicxmz001/LeetCode,comicxmz001/LeetCode
|
Add solution for 136_Single Number with XOR operation.
|
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
#Using XOR to find the single number.
#Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is commutative, so the order of elements does not matter.
#Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
res = 0
for num in nums:
res ^= num
return res
nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
|
<commit_before><commit_msg>Add solution for 136_Single Number with XOR operation.<commit_after>
|
class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
#Using XOR to find the single number.
#Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is commutative, so the order of elements does not matter.
#Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
res = 0
for num in nums:
res ^= num
return res
nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
|
Add solution for 136_Single Number with XOR operation.class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
#Using XOR to find the single number.
#Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is commutative, so the order of elements does not matter.
#Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
res = 0
for num in nums:
res ^= num
return res
nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
|
<commit_before><commit_msg>Add solution for 136_Single Number with XOR operation.<commit_after>class Solution(object):
def singleNumber(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
#Using XOR to find the single number.
#Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is commutative, so the order of elements does not matter.
#Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
res = 0
for num in nums:
res ^= num
return res
nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
|
|
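The same fold written with the standard library, which makes the "XOR everything together" reading explicit and is equivalent to the loop above:

import operator
from functools import reduce

print(reduce(operator.xor, [1, 1, 5, 5, 3, 4, 4, 9, 9, 8, 8, 7, 7]))  # 3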
95edeaa711e8c33e1b431f792e0f2638126ed461
|
pymtl/tools/translation/dynamic_ast_test.py
|
pymtl/tools/translation/dynamic_ast_test.py
|
#=======================================================================
# verilog_from_ast_test.py
#=======================================================================
# This is the test case that verifies the dynamic AST support of PyMTL.
# This test is contributed by Zhuanhao Wu through #169, #170 of PyMTL v2.
#
# Author : Zhuanhao Wu, Peitian Pan
# Date : Jan 23, 2019
import pytest
import random
from ast import *
from pymtl import *
from pclib.test import run_test_vector_sim
from verilator_sim import TranslationTool
pytestmark = requires_verilator
class ASTRTLModel(Model):
def __init__( s ):
s.a = InPort(2)
s.b = InPort(2)
s.out = OutPort(2)
# a simple clocked adder
# @s.posedge_clk
# def logic():
# s.out.next = s.a + s.b
# generate the model from ast
tree = Module(body=[
FunctionDef(name='logic', args=arguments(args=[], defaults=[]),
body= [
Assign(targets=[
Attribute(value=Attribute(value=Name(id='s', ctx=Load()), attr='out', ctx=Load()), attr='next', ctx=Store())
],
value=BinOp(left=Attribute(value=Name(id='s', ctx=Load()), attr='a', ctx=Load()), op=Add(), right=Attribute(value=Name(id='s', ctx=Load()), attr='b', ctx=Load()))
)
],
decorator_list=[
Attribute(value=Name(id='s', ctx=Load()), attr='posedge_clk', ctx=Load())
],
returns=None)
])
tree = fix_missing_locations(tree)
        # Specify the union of globals() and locals() so the free
# variables in the closure can be captured.
exec(compile(tree, filename='<ast>', mode='exec')) in globals().update( locals() )
# As with #175, the user needs to supplement the dynamic AST to
# the .ast field of the generated function object.
logic.ast = tree
def test_ast_rtl_model_works_in_simulation():
mod = ASTRTLModel()
test_vector_table = [('a', 'b', 'out*')]
last_result = '?'
for i in xrange(3):
rv1 = Bits(2, random.randint(0, 3))
rv2 = Bits(2, random.randint(0, 3))
test_vector_table.append( [ rv1, rv2, last_result ] )
last_result = Bits(2, rv1 + rv2)
run_test_vector_sim(mod, test_vector_table)
def test_ast_rtl_model_to_verilog():
mod = ASTRTLModel()
# TranslationTool should successfully compile ASTRTLModel
tool = TranslationTool(mod)
|
Add test case for dynamic ast
|
[dynamic-ast] Add test case for dynamic ast
|
Python
|
bsd-3-clause
|
cornell-brg/pymtl,cornell-brg/pymtl,cornell-brg/pymtl
|
[dynamic-ast] Add test case for dynamic ast
|
#=======================================================================
# verilog_from_ast_test.py
#=======================================================================
# This is the test case that verifies the dynamic AST support of PyMTL.
# This test is contributed by Zhuanhao Wu through #169, #170 of PyMTL v2.
#
# Author : Zhuanhao Wu, Peitian Pan
# Date : Jan 23, 2019
import pytest
import random
from ast import *
from pymtl import *
from pclib.test import run_test_vector_sim
from verilator_sim import TranslationTool
pytestmark = requires_verilator
class ASTRTLModel(Model):
def __init__( s ):
s.a = InPort(2)
s.b = InPort(2)
s.out = OutPort(2)
# a simple clocked adder
# @s.posedge_clk
# def logic():
# s.out.next = s.a + s.b
# generate the model from ast
tree = Module(body=[
FunctionDef(name='logic', args=arguments(args=[], defaults=[]),
body= [
Assign(targets=[
Attribute(value=Attribute(value=Name(id='s', ctx=Load()), attr='out', ctx=Load()), attr='next', ctx=Store())
],
value=BinOp(left=Attribute(value=Name(id='s', ctx=Load()), attr='a', ctx=Load()), op=Add(), right=Attribute(value=Name(id='s', ctx=Load()), attr='b', ctx=Load()))
)
],
decorator_list=[
Attribute(value=Name(id='s', ctx=Load()), attr='posedge_clk', ctx=Load())
],
returns=None)
])
tree = fix_missing_locations(tree)
        # Specify the union of globals() and locals() so the free
# variables in the closure can be captured.
exec(compile(tree, filename='<ast>', mode='exec')) in globals().update( locals() )
# As with #175, the user needs to supplement the dynamic AST to
# the .ast field of the generated function object.
logic.ast = tree
def test_ast_rtl_model_works_in_simulation():
mod = ASTRTLModel()
test_vector_table = [('a', 'b', 'out*')]
last_result = '?'
for i in xrange(3):
rv1 = Bits(2, random.randint(0, 3))
rv2 = Bits(2, random.randint(0, 3))
test_vector_table.append( [ rv1, rv2, last_result ] )
last_result = Bits(2, rv1 + rv2)
run_test_vector_sim(mod, test_vector_table)
def test_ast_rtl_model_to_verilog():
mod = ASTRTLModel()
# TranslationTool should successfully compile ASTRTLModel
tool = TranslationTool(mod)
|
<commit_before><commit_msg>[dynamic-ast] Add test case for dynamic ast<commit_after>
|
#=======================================================================
# verilog_from_ast_test.py
#=======================================================================
# This is the test case that verifies the dynamic AST support of PyMTL.
# This test is contributed by Zhuanhao Wu through #169, #170 of PyMTL v2.
#
# Author : Zhuanhao Wu, Peitian Pan
# Date : Jan 23, 2019
import pytest
import random
from ast import *
from pymtl import *
from pclib.test import run_test_vector_sim
from verilator_sim import TranslationTool
pytestmark = requires_verilator
class ASTRTLModel(Model):
def __init__( s ):
s.a = InPort(2)
s.b = InPort(2)
s.out = OutPort(2)
# a simple clocked adder
# @s.posedge_clk
# def logic():
# s.out.next = s.a + s.b
# generate the model from ast
tree = Module(body=[
FunctionDef(name='logic', args=arguments(args=[], defaults=[]),
body= [
Assign(targets=[
Attribute(value=Attribute(value=Name(id='s', ctx=Load()), attr='out', ctx=Load()), attr='next', ctx=Store())
],
value=BinOp(left=Attribute(value=Name(id='s', ctx=Load()), attr='a', ctx=Load()), op=Add(), right=Attribute(value=Name(id='s', ctx=Load()), attr='b', ctx=Load()))
)
],
decorator_list=[
Attribute(value=Name(id='s', ctx=Load()), attr='posedge_clk', ctx=Load())
],
returns=None)
])
tree = fix_missing_locations(tree)
        # Specify the union of globals() and locals() so the free
# variables in the closure can be captured.
exec(compile(tree, filename='<ast>', mode='exec')) in globals().update( locals() )
# As with #175, the user needs to supplement the dynamic AST to
# the .ast field of the generated function object.
logic.ast = tree
def test_ast_rtl_model_works_in_simulation():
mod = ASTRTLModel()
test_vector_table = [('a', 'b', 'out*')]
last_result = '?'
for i in xrange(3):
rv1 = Bits(2, random.randint(0, 3))
rv2 = Bits(2, random.randint(0, 3))
test_vector_table.append( [ rv1, rv2, last_result ] )
last_result = Bits(2, rv1 + rv2)
run_test_vector_sim(mod, test_vector_table)
def test_ast_rtl_model_to_verilog():
mod = ASTRTLModel()
# TranslationTool should successfully compile ASTRTLModel
tool = TranslationTool(mod)
|
[dynamic-ast] Add test case for dynamic ast#=======================================================================
# verilog_from_ast_test.py
#=======================================================================
# This is the test case that verifies the dynamic AST support of PyMTL.
# This test is contributed by Zhuanhao Wu through #169, #170 of PyMTL v2.
#
# Author : Zhuanhao Wu, Peitian Pan
# Date : Jan 23, 2019
import pytest
import random
from ast import *
from pymtl import *
from pclib.test import run_test_vector_sim
from verilator_sim import TranslationTool
pytestmark = requires_verilator
class ASTRTLModel(Model):
def __init__( s ):
s.a = InPort(2)
s.b = InPort(2)
s.out = OutPort(2)
# a simple clocked adder
# @s.posedge_clk
# def logic():
# s.out.next = s.a + s.b
# generate the model from ast
tree = Module(body=[
FunctionDef(name='logic', args=arguments(args=[], defaults=[]),
body= [
Assign(targets=[
Attribute(value=Attribute(value=Name(id='s', ctx=Load()), attr='out', ctx=Load()), attr='next', ctx=Store())
],
value=BinOp(left=Attribute(value=Name(id='s', ctx=Load()), attr='a', ctx=Load()), op=Add(), right=Attribute(value=Name(id='s', ctx=Load()), attr='b', ctx=Load()))
)
],
decorator_list=[
Attribute(value=Name(id='s', ctx=Load()), attr='posedge_clk', ctx=Load())
],
returns=None)
])
tree = fix_missing_locations(tree)
        # Specify the union of globals() and locals() so the free
# variables in the closure can be captured.
exec(compile(tree, filename='<ast>', mode='exec')) in globals().update( locals() )
# As with #175, the user needs to supplement the dynamic AST to
# the .ast field of the generated function object.
logic.ast = tree
def test_ast_rtl_model_works_in_simulation():
mod = ASTRTLModel()
test_vector_table = [('a', 'b', 'out*')]
last_result = '?'
for i in xrange(3):
rv1 = Bits(2, random.randint(0, 3))
rv2 = Bits(2, random.randint(0, 3))
test_vector_table.append( [ rv1, rv2, last_result ] )
last_result = Bits(2, rv1 + rv2)
run_test_vector_sim(mod, test_vector_table)
def test_ast_rtl_model_to_verilog():
mod = ASTRTLModel()
# TranslationTool should successfully compile ASTRTLModel
tool = TranslationTool(mod)
|
<commit_before><commit_msg>[dynamic-ast] Add test case for dynamic ast<commit_after>#=======================================================================
# verilog_from_ast_test.py
#=======================================================================
# This is the test case that verifies the dynamic AST support of PyMTL.
# This test is contributed by Zhuanhao Wu through #169, #170 of PyMTL v2.
#
# Author : Zhuanhao Wu, Peitian Pan
# Date : Jan 23, 2019
import pytest
import random
from ast import *
from pymtl import *
from pclib.test import run_test_vector_sim
from verilator_sim import TranslationTool
pytestmark = requires_verilator
class ASTRTLModel(Model):
def __init__( s ):
s.a = InPort(2)
s.b = InPort(2)
s.out = OutPort(2)
# a simple clocked adder
# @s.posedge_clk
# def logic():
# s.out.next = s.a + s.b
# generate the model from ast
tree = Module(body=[
FunctionDef(name='logic', args=arguments(args=[], defaults=[]),
body= [
Assign(targets=[
Attribute(value=Attribute(value=Name(id='s', ctx=Load()), attr='out', ctx=Load()), attr='next', ctx=Store())
],
value=BinOp(left=Attribute(value=Name(id='s', ctx=Load()), attr='a', ctx=Load()), op=Add(), right=Attribute(value=Name(id='s', ctx=Load()), attr='b', ctx=Load()))
)
],
decorator_list=[
Attribute(value=Name(id='s', ctx=Load()), attr='posedge_clk', ctx=Load())
],
returns=None)
])
tree = fix_missing_locations(tree)
        # Specify the union of globals() and locals() so the free
# variables in the closure can be captured.
exec(compile(tree, filename='<ast>', mode='exec')) in globals().update( locals() )
# As with #175, the user needs to supplement the dynamic AST to
# the .ast field of the generated function object.
logic.ast = tree
def test_ast_rtl_model_works_in_simulation():
mod = ASTRTLModel()
test_vector_table = [('a', 'b', 'out*')]
last_result = '?'
for i in xrange(3):
rv1 = Bits(2, random.randint(0, 3))
rv2 = Bits(2, random.randint(0, 3))
test_vector_table.append( [ rv1, rv2, last_result ] )
last_result = Bits(2, rv1 + rv2)
run_test_vector_sim(mod, test_vector_table)
def test_ast_rtl_model_to_verilog():
mod = ASTRTLModel()
# TranslationTool should successfully compile ASTRTLModel
tool = TranslationTool(mod)
|
|
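The compile-and-exec step used above can be demonstrated with the stdlib alone; a Python 3 sketch that obtains the tree from ast.parse instead of hand-assembled nodes:

import ast

tree = ast.parse("def double(x):\n    return x * 2")
tree = ast.fix_missing_locations(tree)  # no-op here; required for hand-built trees
ns = {}
exec(compile(tree, filename="<ast>", mode="exec"), ns)
assert ns["double"](21) == 42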
1b6fecb5819fbead0aadcc1a8669e915542c5ea0
|
other/testing-game.py
|
other/testing-game.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import os
import subprocess
import re
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', help='The directory to search for files in', required=False, default=os.getcwd())
args = parser.parse_args()
names = {}
for root, dirs, files in os.walk(args.directory):
for name in files:
filename, fileextension = os.path.splitext(name)
absfile = os.path.join(root, name)
if fileextension == '.m' or fileextension == '.mm':
try:
with open(absfile) as sourcefile:
source = sourcefile.read()
if source.find('XCTestCase') != -1:
p = subprocess.Popen(['git', 'blame', absfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
for blame_line in out.splitlines():
if blame_line.replace(' ', '').find('-(void)test') != -1:
blame_info = blame_line[blame_line.find('(')+1:]
blame_info = blame_info[:blame_info.find(')')]
blame_components = blame_info.split()
name_components = blame_components[:len(blame_components)-4]
name = ' '.join(name_components)
name_count = names.get(name, 0)
names[name] = name_count + 1
except:
                print 'Could not open file: ' + absfile
print names
|
Add script for gameifying testing
|
Tools: Add script for gameifying testing
|
Python
|
apache-2.0
|
spotify/testing-game,spotify/testing-game,spotify/testing-game,spotify/testing-game
|
Tools: Add script for gameifying testing
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import os
import subprocess
import re
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', help='The directory to search for files in', required=False, default=os.getcwd())
args = parser.parse_args()
names = {}
for root, dirs, files in os.walk(args.directory):
for name in files:
filename, fileextension = os.path.splitext(name)
absfile = os.path.join(root, name)
if fileextension == '.m' or fileextension == '.mm':
try:
with open(absfile) as sourcefile:
source = sourcefile.read()
if source.find('XCTestCase') != -1:
p = subprocess.Popen(['git', 'blame', absfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
for blame_line in out.splitlines():
if blame_line.replace(' ', '').find('-(void)test') != -1:
blame_info = blame_line[blame_line.find('(')+1:]
blame_info = blame_info[:blame_info.find(')')]
blame_components = blame_info.split()
name_components = blame_components[:len(blame_components)-4]
name = ' '.join(name_components)
name_count = names.get(name, 0)
names[name] = name_count + 1
except:
                print 'Could not open file: ' + absfile
print names
|
<commit_before><commit_msg>Tools: Add script for gameifying testing<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import os
import subprocess
import re
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', help='The directory to search for files in', required=False, default=os.getcwd())
args = parser.parse_args()
names = {}
for root, dirs, files in os.walk(args.directory):
for name in files:
filename, fileextension = os.path.splitext(name)
absfile = os.path.join(root, name)
if fileextension == '.m' or fileextension == '.mm':
try:
with open(absfile) as sourcefile:
source = sourcefile.read()
if source.find('XCTestCase') != -1:
p = subprocess.Popen(['git', 'blame', absfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
for blame_line in out.splitlines():
if blame_line.replace(' ', '').find('-(void)test') != -1:
blame_info = blame_line[blame_line.find('(')+1:]
blame_info = blame_info[:blame_info.find(')')]
blame_components = blame_info.split()
name_components = blame_components[:len(blame_components)-4]
name = ' '.join(name_components)
name_count = names.get(name, 0)
names[name] = name_count + 1
except:
                print 'Could not open file: ' + absfile
print names
|
Tools: Add script for gameifying testing#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import os
import subprocess
import re
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', help='The directory to search for files in', required=False, default=os.getcwd())
args = parser.parse_args()
names = {}
for root, dirs, files in os.walk(args.directory):
for name in files:
filename, fileextension = os.path.splitext(name)
absfile = os.path.join(root, name)
if fileextension == '.m' or fileextension == '.mm':
try:
with open(absfile) as sourcefile:
source = sourcefile.read()
if source.find('XCTestCase') != -1:
p = subprocess.Popen(['git', 'blame', absfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
for blame_line in out.splitlines():
if blame_line.replace(' ', '').find('-(void)test') != -1:
blame_info = blame_line[blame_line.find('(')+1:]
blame_info = blame_info[:blame_info.find(')')]
blame_components = blame_info.split()
name_components = blame_components[:len(blame_components)-4]
name = ' '.join(name_components)
name_count = names.get(name, 0)
names[name] = name_count + 1
except:
                print 'Could not open file: ' + absfile
print names
|
<commit_before><commit_msg>Tools: Add script for gameifying testing<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import os
import subprocess
import re
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', help='The directory to search for files in', required=False, default=os.getcwd())
args = parser.parse_args()
names = {}
for root, dirs, files in os.walk(args.directory):
for name in files:
filename, fileextension = os.path.splitext(name)
absfile = os.path.join(root, name)
if fileextension == '.m' or fileextension == '.mm':
try:
with open(absfile) as sourcefile:
source = sourcefile.read()
if source.find('XCTestCase') != -1:
p = subprocess.Popen(['git', 'blame', absfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
for blame_line in out.splitlines():
if blame_line.replace(' ', '').find('-(void)test') != -1:
blame_info = blame_line[blame_line.find('(')+1:]
blame_info = blame_info[:blame_info.find(')')]
blame_components = blame_info.split()
name_components = blame_components[:len(blame_components)-4]
name = ' '.join(name_components)
name_count = names.get(name, 0)
names[name] = name_count + 1
except:
                print 'Could not open file: ' + absfile
print names
|
|
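Editor's note on the record above: slicing the author out of git blame's parenthesised output is fragile (author names containing ')' or locale-dependent date formats break it). Below is a minimal sketch of a sturdier variant using git blame --line-porcelain, which emits an explicit "author <name>" header for every line; the function name and structure are illustrative, not part of the commit.

import subprocess

def count_test_authors(absfile):
    # --line-porcelain prints machine-readable headers for each source line,
    # including "author <name>"; the line content itself follows, prefixed
    # with a tab character.
    out = subprocess.check_output(['git', 'blame', '--line-porcelain', absfile])
    counts = {}
    author = None
    for line in out.decode('utf-8', 'replace').splitlines():
        if line.startswith('author '):
            author = line[len('author '):]
        elif line.startswith('\t') and '-(void)test' in line.replace(' ', ''):
            counts[author] = counts.get(author, 0) + 1
    return counts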
940299a7bfd967653899b176ce76e6f1cf02ca83
|
liwcpairs2es.py
|
liwcpairs2es.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from elasticsearch import Elasticsearch, helpers
from collections import Counter
from datetime import datetime
def find_pairs(list1, list2):
pairs = []
if list1 and list2:
for item1 in list1:
for item2 in list2:
pairs.append(u'{}@{}'.format(item1, item2))
return pairs
es = Elasticsearch()
index_name = 'embem'
doc_type = 'event'
cat1 = 'Body'
cat2 = 'Posemo'
timestamp = datetime.now().isoformat()
pairs_count = Counter()
years = {}
q = {
"query": {
"wildcard": {"text_id": "*"}
}
}
results = helpers.scan(client=es, query=q, index=index_name, doc_type=doc_type)
for r in results:
# get tags
cat1_tags = r.get('_source').get('liwc-entities').get('data').get(cat1)
cat2_tags = r.get('_source').get('liwc-entities').get('data').get(cat2)
# find all pairs
pairs = find_pairs(cat1_tags, cat2_tags)
if pairs:
for pair in pairs:
pairs_count[pair] += 1
year = r.get('_source').get('year')
if year not in years.keys():
years[year] = Counter()
years[year][pair] += 1
# save pairs to ES
doc = {
'doc': {
'pairs-{}-{}'.format(cat1, cat2): {
'data': pairs,
'num_pairs': len(pairs),
'timestamp': timestamp
}
}
}
es.update(index=index_name, doc_type=doc_type,
id=r.get('_id'), body=doc)
sorted_years = years.keys()
sorted_years.sort()
print '{}\t{}\tFrequency'.format(cat1, cat2) + \
''.join(['\t{}'.format(k) for k in sorted_years])
print 'TOTAL\tTOTAL\t{}'.format(sum(pairs_count.values())) + \
''.join(['\t{}'.format(sum(years[k].values())) for k in sorted_years])
for p, f in pairs_count.most_common():
(w1, w2) = p.split('@')
print u'{}\t{}\t{}'.format(w1, w2, f).encode('utf-8') + \
''.join(['\t{}'.format(years[k][p]) for k in sorted_years])
|
Add script to generate pairs of LIWC categories
|
Add script to generate pairs of LIWC categories
Added a script that generates pairs of LIWC categories for Body and
Posemo words. The pairs are added to the index, and data about the pairs
is written to stdout.
The LIWC categories for which pairs are generated are hardcoded. This
should be changed. Also adding the pairs to the ES index should be made
optional.
|
Python
|
apache-2.0
|
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
|
Add script to generate pairs of LIWC categories
Added a script that generates pairs of LIWC categories for Body and
Posemo words. The pairs are added to the index, and data about the pairs
is written to stdout.
The LIWC categories for which pairs are generated are hardcoded. This
should be changed. Also adding the pairs to the ES index should be made
optional.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from elasticsearch import Elasticsearch, helpers
from collections import Counter
from datetime import datetime
def find_pairs(list1, list2):
pairs = []
if list1 and list2:
for item1 in list1:
for item2 in list2:
pairs.append(u'{}@{}'.format(item1, item2))
return pairs
es = Elasticsearch()
index_name = 'embem'
doc_type = 'event'
cat1 = 'Body'
cat2 = 'Posemo'
timestamp = datetime.now().isoformat()
pairs_count = Counter()
years = {}
q = {
"query": {
"wildcard": {"text_id": "*"}
}
}
results = helpers.scan(client=es, query=q, index=index_name, doc_type=doc_type)
for r in results:
# get tags
cat1_tags = r.get('_source').get('liwc-entities').get('data').get(cat1)
cat2_tags = r.get('_source').get('liwc-entities').get('data').get(cat2)
# find all pairs
pairs = find_pairs(cat1_tags, cat2_tags)
if pairs:
for pair in pairs:
pairs_count[pair] += 1
year = r.get('_source').get('year')
if year not in years.keys():
years[year] = Counter()
years[year][pair] += 1
# save pairs to ES
doc = {
'doc': {
'pairs-{}-{}'.format(cat1, cat2): {
'data': pairs,
'num_pairs': len(pairs),
'timestamp': timestamp
}
}
}
es.update(index=index_name, doc_type=doc_type,
id=r.get('_id'), body=doc)
sorted_years = years.keys()
sorted_years.sort()
print '{}\t{}\tFrequency'.format(cat1, cat2) + \
''.join(['\t{}'.format(k) for k in sorted_years])
print 'TOTAL\tTOTAL\t{}'.format(sum(pairs_count.values())) + \
''.join(['\t{}'.format(sum(years[k].values())) for k in sorted_years])
for p, f in pairs_count.most_common():
(w1, w2) = p.split('@')
print u'{}\t{}\t{}'.format(w1, w2, f).encode('utf-8') + \
''.join(['\t{}'.format(years[k][p]) for k in sorted_years])
|
<commit_before><commit_msg>Add script to generate pairs of LIWC categories
Added a script that generates pairs of LIWC categories for Body and
Posemo words. The pairs are added to the index, and data about the pairs
is written to stdout.
The LIWC categories for which pairs are generated are hardcoded. This
should be changed. Also adding the pairs to the ES index should be made
optional.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from elasticsearch import Elasticsearch, helpers
from collections import Counter
from datetime import datetime
def find_pairs(list1, list2):
pairs = []
if list1 and list2:
for item1 in list1:
for item2 in list2:
pairs.append(u'{}@{}'.format(item1, item2))
return pairs
es = Elasticsearch()
index_name = 'embem'
doc_type = 'event'
cat1 = 'Body'
cat2 = 'Posemo'
timestamp = datetime.now().isoformat()
pairs_count = Counter()
years = {}
q = {
"query": {
"wildcard": {"text_id": "*"}
}
}
results = helpers.scan(client=es, query=q, index=index_name, doc_type=doc_type)
for r in results:
# get tags
cat1_tags = r.get('_source').get('liwc-entities').get('data').get(cat1)
cat2_tags = r.get('_source').get('liwc-entities').get('data').get(cat2)
# find all pairs
pairs = find_pairs(cat1_tags, cat2_tags)
if pairs:
for pair in pairs:
pairs_count[pair] += 1
year = r.get('_source').get('year')
if year not in years.keys():
years[year] = Counter()
years[year][pair] += 1
# save pairs to ES
doc = {
'doc': {
'pairs-{}-{}'.format(cat1, cat2): {
'data': pairs,
'num_pairs': len(pairs),
'timestamp': timestamp
}
}
}
es.update(index=index_name, doc_type=doc_type,
id=r.get('_id'), body=doc)
sorted_years = years.keys()
sorted_years.sort()
print '{}\t{}\tFrequency'.format(cat1, cat2) + \
''.join(['\t{}'.format(k) for k in sorted_years])
print 'TOTAL\tTOTAL\t{}'.format(sum(pairs_count.values())) + \
''.join(['\t{}'.format(sum(years[k].values())) for k in sorted_years])
for p, f in pairs_count.most_common():
(w1, w2) = p.split('@')
print u'{}\t{}\t{}'.format(w1, w2, f).encode('utf-8') + \
''.join(['\t{}'.format(years[k][p]) for k in sorted_years])
|
Add script to generate pairs of LIWC categories
Added a script that generates pairs of LIWC categories for Body and
Posemo words. The pairs are added to the index, and data about the pairs
is written to stdout.
The LIWC categories for which pairs are generated are hardcoded. This
should be changed. Also adding the pairs to the ES index should be made
optional.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from elasticsearch import Elasticsearch, helpers
from collections import Counter
from datetime import datetime
def find_pairs(list1, list2):
pairs = []
if list1 and list2:
for item1 in list1:
for item2 in list2:
pairs.append(u'{}@{}'.format(item1, item2))
return pairs
es = Elasticsearch()
index_name = 'embem'
doc_type = 'event'
cat1 = 'Body'
cat2 = 'Posemo'
timestamp = datetime.now().isoformat()
pairs_count = Counter()
years = {}
q = {
"query": {
"wildcard": {"text_id": "*"}
}
}
results = helpers.scan(client=es, query=q, index=index_name, doc_type=doc_type)
for r in results:
# get tags
cat1_tags = r.get('_source').get('liwc-entities').get('data').get(cat1)
cat2_tags = r.get('_source').get('liwc-entities').get('data').get(cat2)
# find all pairs
pairs = find_pairs(cat1_tags, cat2_tags)
if pairs:
for pair in pairs:
pairs_count[pair] += 1
year = r.get('_source').get('year')
if year not in years.keys():
years[year] = Counter()
years[year][pair] += 1
# save pairs to ES
doc = {
'doc': {
'pairs-{}-{}'.format(cat1, cat2): {
'data': pairs,
'num_pairs': len(pairs),
'timestamp': timestamp
}
}
}
es.update(index=index_name, doc_type=doc_type,
id=r.get('_id'), body=doc)
sorted_years = years.keys()
sorted_years.sort()
print '{}\t{}\tFrequency'.format(cat1, cat2) + \
''.join(['\t{}'.format(k) for k in sorted_years])
print 'TOTAL\tTOTAL\t{}'.format(sum(pairs_count.values())) + \
''.join(['\t{}'.format(sum(years[k].values())) for k in sorted_years])
for p, f in pairs_count.most_common():
(w1, w2) = p.split('@')
print u'{}\t{}\t{}'.format(w1, w2, f).encode('utf-8') + \
''.join(['\t{}'.format(years[k][p]) for k in sorted_years])
|
<commit_before><commit_msg>Add script to generate pairs of LIWC categories
Added a script that generates pairs of LIWC categories for Body and
Posemo words. The pairs are added to the index. And data about the pairs
is written to std out.
The LIWC categories for which pairs are generated are hardcoded. This
should be changed. Also adding the pairs to the ES index should be made
optional.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from elasticsearch import Elasticsearch, helpers
from collections import Counter
from datetime import datetime
def find_pairs(list1, list2):
pairs = []
if list1 and list2:
for item1 in list1:
for item2 in list2:
pairs.append(u'{}@{}'.format(item1, item2))
return pairs
es = Elasticsearch()
index_name = 'embem'
doc_type = 'event'
cat1 = 'Body'
cat2 = 'Posemo'
timestamp = datetime.now().isoformat()
pairs_count = Counter()
years = {}
q = {
"query": {
"wildcard": {"text_id": "*"}
}
}
results = helpers.scan(client=es, query=q, index=index_name, doc_type=doc_type)
for r in results:
# get tags
cat1_tags = r.get('_source').get('liwc-entities').get('data').get(cat1)
cat2_tags = r.get('_source').get('liwc-entities').get('data').get(cat2)
# find all pairs
pairs = find_pairs(cat1_tags, cat2_tags)
if pairs:
for pair in pairs:
pairs_count[pair] += 1
year = r.get('_source').get('year')
if year not in years.keys():
years[year] = Counter()
years[year][pair] += 1
# save pairs to ES
doc = {
'doc': {
'pairs-{}-{}'.format(cat1, cat2): {
'data': pairs,
'num_pairs': len(pairs),
'timestamp': timestamp
}
}
}
es.update(index=index_name, doc_type=doc_type,
id=r.get('_id'), body=doc)
sorted_years = years.keys()
sorted_years.sort()
print '{}\t{}\tFrequency'.format(cat1, cat2) + \
''.join(['\t{}'.format(k) for k in sorted_years])
print 'TOTAL\tTOTAL\t{}'.format(sum(pairs_count.values())) + \
''.join(['\t{}'.format(sum(years[k].values())) for k in sorted_years])
for p, f in pairs_count.most_common():
(w1, w2) = p.split('@')
print u'{}\t{}\t{}'.format(w1, w2, f).encode('utf-8') + \
''.join(['\t{}'.format(years[k][p]) for k in sorted_years])
|
|
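The commit message above names two follow-ups: the LIWC categories are hardcoded, and writing the pairs back to Elasticsearch should be optional. A minimal sketch, assuming argparse, of how the script's header could be parameterised; the flag names are hypothetical and not part of the original script.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--cat1', default='Body', help='first LIWC category')
parser.add_argument('--cat2', default='Posemo', help='second LIWC category')
parser.add_argument('--write-pairs', action='store_true',
                    help='also store the pairs back into the ES index')
args = parser.parse_args()
cat1, cat2 = args.cat1, args.cat2  # replaces the hardcoded assignments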
95f5b7cd2325a61f537bffb783e950b30c97da5f
|
bayespy/demos/gamma_shape.py
|
bayespy/demos/gamma_shape.py
|
from bayespy import nodes
from bayespy.inference import VB
def run():
a = nodes.GammaShape(name='a')
b = nodes.Gamma(1e-5, 1e-5, name='b')
tau = nodes.Gamma(a, b, plates=(1000,), name='tau')
tau.observe(nodes.Gamma(10, 20, plates=(1000,)).random())
Q = VB(tau, a, b)
Q.update(repeat=1000)
print("True gamma parameters:", 10.0, 20.0)
print("Estimated parameters from 1000 samples:", a.u[0], b.u[0])
if __name__ == "__main__":
run()
|
Add a demo about learning the shape parameter of gamma dist
|
DEMO: Add a demo about learning the shape parameter of gamma dist
|
Python
|
mit
|
bayespy/bayespy,fivejjs/bayespy,jluttine/bayespy,SalemAmeen/bayespy
|
DEMO: Add a demo about learning the shape parameter of gamma dist
|
from bayespy import nodes
from bayespy.inference import VB
def run():
a = nodes.GammaShape(name='a')
b = nodes.Gamma(1e-5, 1e-5, name='b')
tau = nodes.Gamma(a, b, plates=(1000,), name='tau')
tau.observe(nodes.Gamma(10, 20, plates=(1000,)).random())
Q = VB(tau, a, b)
Q.update(repeat=1000)
print("True gamma parameters:", 10.0, 20.0)
print("Estimated parameters from 1000 samples:", a.u[0], b.u[0])
if __name__ == "__main__":
run()
|
<commit_before><commit_msg>DEMO: Add a demo about learning the shape parameter of gamma dist<commit_after>
|
from bayespy import nodes
from bayespy.inference import VB
def run():
a = nodes.GammaShape(name='a')
b = nodes.Gamma(1e-5, 1e-5, name='b')
tau = nodes.Gamma(a, b, plates=(1000,), name='tau')
tau.observe(nodes.Gamma(10, 20, plates=(1000,)).random())
Q = VB(tau, a, b)
Q.update(repeat=1000)
print("True gamma parameters:", 10.0, 20.0)
print("Estimated parameters from 1000 samples:", a.u[0], b.u[0])
if __name__ == "__main__":
run()
|
DEMO: Add a demo about learning the shape parameter of gamma dist
from bayespy import nodes
from bayespy.inference import VB
def run():
a = nodes.GammaShape(name='a')
b = nodes.Gamma(1e-5, 1e-5, name='b')
tau = nodes.Gamma(a, b, plates=(1000,), name='tau')
tau.observe(nodes.Gamma(10, 20, plates=(1000,)).random())
Q = VB(tau, a, b)
Q.update(repeat=1000)
print("True gamma parameters:", 10.0, 20.0)
print("Estimated parameters from 1000 samples:", a.u[0], b.u[0])
if __name__ == "__main__":
run()
|
<commit_before><commit_msg>DEMO: Add a demo about learning the shape parameter of gamma dist<commit_after>
from bayespy import nodes
from bayespy.inference import VB
def run():
a = nodes.GammaShape(name='a')
b = nodes.Gamma(1e-5, 1e-5, name='b')
tau = nodes.Gamma(a, b, plates=(1000,), name='tau')
tau.observe(nodes.Gamma(10, 20, plates=(1000,)).random())
Q = VB(tau, a, b)
Q.update(repeat=1000)
print("True gamma parameters:", 10.0, 20.0)
print("Estimated parameters from 1000 samples:", a.u[0], b.u[0])
if __name__ == "__main__":
run()
|
|
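A hedged follow-up to the demo above: assuming u[0] holds each node's first moment (E[a] and E[b]), the implied posterior mean of tau is their ratio, which can be compared with the true mean 10/20 = 0.5 used to generate the data. This presumes a and b are available, e.g. if run() were changed to return them.

est_mean = a.u[0] / b.u[0]  # mean of Gamma(a, b) is shape/rate = E[a]/E[b]
print("Estimated mean of tau:", est_mean, "(true mean: 10/20 = 0.5)")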
d8fff759f2bff24f20cdbe98370ede9e5f3b7b13
|
convergence_tests/2D_helmholtz.py
|
convergence_tests/2D_helmholtz.py
|
from __future__ import absolute_import, division
from firedrake import *
import numpy as np
def helmholtz_mixed(x, V1, V2):
# Create mesh and define function space
mesh = UnitSquareMesh(2**x, 2**x)
V1 = FunctionSpace(mesh, *V1, name="V")
V2 = FunctionSpace(mesh, *V2, name="P")
W = V1 * V2
# Define variational problem
lmbda = 1
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
f = Function(V2)
f.interpolate(Expression("(1+8*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
a = (p*q - q*div(u) + lmbda*inner(v, u) + div(v)*p) * dx
L = f*q*dx
# Compute solution
x = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, x, solver_parameters=params)
# Analytical solution
f.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
u, p = x.split()
err = sqrt(assemble(dot(p - f, p - f) * dx))
return x, err
V1 = ('RT', 1)
V2 = ('DG', 0)
x, err = helmholtz_mixed(8, V1, V2)
print err
File("helmholtz_mixed.pvd").write(x.split()[0], x.split()[1])
l2errs = []
for i in range(1, 9):
l2errs.append(helmholtz_mixed(i, V1, V2)[1])
l2errs = np.array(l2errs)
conv = np.log2(l2errs[:-1] / l2errs[1:])[-1]
print conv
|
Add 2D Helmholtz convergence test
|
Add 2D Helmholtz convergence test
|
Python
|
mit
|
thomasgibson/firedrake-hybridization
|
Add 2D Helmholtz convergence test
|
from __future__ import absolute_import, division
from firedrake import *
import numpy as np
def helmholtz_mixed(x, V1, V2):
# Create mesh and define function space
mesh = UnitSquareMesh(2**x, 2**x)
V1 = FunctionSpace(mesh, *V1, name="V")
V2 = FunctionSpace(mesh, *V2, name="P")
W = V1 * V2
# Define variational problem
lmbda = 1
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
f = Function(V2)
f.interpolate(Expression("(1+8*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
a = (p*q - q*div(u) + lmbda*inner(v, u) + div(v)*p) * dx
L = f*q*dx
# Compute solution
x = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, x, solver_parameters=params)
# Analytical solution
f.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
u, p = x.split()
err = sqrt(assemble(dot(p - f, p - f) * dx))
return x, err
V1 = ('RT', 1)
V2 = ('DG', 0)
x, err = helmholtz_mixed(8, V1, V2)
print err
File("helmholtz_mixed.pvd").write(x.split()[0], x.split()[1])
l2errs = []
for i in range(1, 9):
l2errs.append(helmholtz_mixed(i, V1, V2)[1])
l2errs = np.array(l2errs)
conv = np.log2(l2errs[:-1] / l2errs[1:])[-1]
print conv
|
<commit_before><commit_msg>Add 2D Helmholtz convergence test<commit_after>
|
from __future__ import absolute_import, division
from firedrake import *
import numpy as np
def helmholtz_mixed(x, V1, V2):
# Create mesh and define function space
mesh = UnitSquareMesh(2**x, 2**x)
V1 = FunctionSpace(mesh, *V1, name="V")
V2 = FunctionSpace(mesh, *V2, name="P")
W = V1 * V2
# Define variational problem
lmbda = 1
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
f = Function(V2)
f.interpolate(Expression("(1+8*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
a = (p*q - q*div(u) + lmbda*inner(v, u) + div(v)*p) * dx
L = f*q*dx
# Compute solution
x = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, x, solver_parameters=params)
# Analytical solution
f.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
u, p = x.split()
err = sqrt(assemble(dot(p - f, p - f) * dx))
return x, err
V1 = ('RT', 1)
V2 = ('DG', 0)
x, err = helmholtz_mixed(8, V1, V2)
print err
File("helmholtz_mixed.pvd").write(x.split()[0], x.split()[1])
l2errs = []
for i in range(1, 9):
l2errs.append(helmholtz_mixed(i, V1, V2)[1])
l2errs = np.array(l2errs)
conv = np.log2(l2errs[:-1] / l2errs[1:])[-1]
print conv
|
Add 2D Helmholtz convergence testfrom __future__ import absolute_import, division
from firedrake import *
import numpy as np
def helmholtz_mixed(x, V1, V2):
# Create mesh and define function space
mesh = UnitSquareMesh(2**x, 2**x)
V1 = FunctionSpace(mesh, *V1, name="V")
V2 = FunctionSpace(mesh, *V2, name="P")
W = V1 * V2
# Define variational problem
lmbda = 1
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
f = Function(V2)
f.interpolate(Expression("(1+8*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
a = (p*q - q*div(u) + lmbda*inner(v, u) + div(v)*p) * dx
L = f*q*dx
# Compute solution
x = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, x, solver_parameters=params)
# Analytical solution
f.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
u, p = x.split()
err = sqrt(assemble(dot(p - f, p - f) * dx))
return x, err
V1 = ('RT', 1)
V2 = ('DG', 0)
x, err = helmholtz_mixed(8, V1, V2)
print err
File("helmholtz_mixed.pvd").write(x.split()[0], x.split()[1])
l2errs = []
for i in range(1, 9):
l2errs.append(helmholtz_mixed(i, V1, V2)[1])
l2errs = np.array(l2errs)
conv = np.log2(l2errs[:-1] / l2errs[1:])[-1]
print conv
|
<commit_before><commit_msg>Add 2D Helmholtz convergence test<commit_after>from __future__ import absolute_import, division
from firedrake import *
import numpy as np
def helmholtz_mixed(x, V1, V2):
# Create mesh and define function space
mesh = UnitSquareMesh(2**x, 2**x)
V1 = FunctionSpace(mesh, *V1, name="V")
V2 = FunctionSpace(mesh, *V2, name="P")
W = V1 * V2
# Define variational problem
lmbda = 1
u, p = TrialFunctions(W)
v, q = TestFunctions(W)
f = Function(V2)
f.interpolate(Expression("(1+8*pi*pi)*sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
a = (p*q - q*div(u) + lmbda*inner(v, u) + div(v)*p) * dx
L = f*q*dx
# Compute solution
x = Function(W)
params = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.HybridizationPC',
'hybridization': {'ksp_type': 'preonly',
'pc_type': 'lu',
'hdiv_residual': {'ksp_type': 'cg',
'ksp_rtol': 1e-14},
'use_reconstructor': True}}
solve(a == L, x, solver_parameters=params)
# Analytical solution
f.interpolate(Expression("sin(x[0]*pi*2)*sin(x[1]*pi*2)"))
u, p = x.split()
err = sqrt(assemble(dot(p - f, p - f) * dx))
return x, err
V1 = ('RT', 1)
V2 = ('DG', 0)
x, err = helmholtz_mixed(8, V1, V2)
print err
File("helmholtz_mixed.pvd").write(x.split()[0], x.split()[1])
l2errs = []
for i in range(1, 9):
l2errs.append(helmholtz_mixed(i, V1, V2)[1])
l2errs = np.array(l2errs)
conv = np.log2(l2errs[:-1] / l2errs[1:])[-1]
print conv
|
|
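A short hedged check on the convergence computation above: for the lowest-order RT1/DG0 pair the pressure error is expected to roughly halve per refinement, so printing the whole rate sequence rather than only the last entry makes the trend visible. This reuses the l2errs array already built by the script.

rates = np.log2(l2errs[:-1] / l2errs[1:])  # rate between successive meshes
print(rates)  # entries should settle towards ~1.0 for RT1/DG0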
1cc15f3ae9a0b7fa5b2dae4bcdd9f0f3c061ce4d
|
reclama/sprints/migrations/0002_auto_20150130_1751.py
|
reclama/sprints/migrations/0002_auto_20150130_1751.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('sprints', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='bug',
name='event',
field=models.ManyToManyField(related_name='bugs', to='sprints.Event'),
preserve_default=True,
),
]
|
Fix related_name on Bug model
|
Fix related_name on Bug model
|
Python
|
mpl-2.0
|
mozilla/reclama,mozilla/reclama,mozilla/reclama,mozilla/reclama
|
Fix related_name on Bug model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('sprints', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='bug',
name='event',
field=models.ManyToManyField(related_name='bugs', to='sprints.Event'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Fix related_name on Bug model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('sprints', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='bug',
name='event',
field=models.ManyToManyField(related_name='bugs', to='sprints.Event'),
preserve_default=True,
),
]
|
Fix related_name on Bug model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('sprints', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='bug',
name='event',
field=models.ManyToManyField(related_name='bugs', to='sprints.Event'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Fix related_name on Bug model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('sprints', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='bug',
name='event',
field=models.ManyToManyField(related_name='bugs', to='sprints.Event'),
preserve_default=True,
),
]
|
|
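Illustration of what the corrected related_name enables (hedged: this requires a configured Django project, and the module path sprints.models is assumed): each Event gains a bugs reverse accessor for the many-to-many field.

from sprints.models import Event  # assumed module path

event = Event.objects.first()
if event is not None:
    linked_bugs = event.bugs.all()  # reverse M2M lookup via related_name='bugs'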
dc0ecffd6c4115019cfcbcc13b17a20511888c9b
|
python/paddle/fluid/tests/unittests/test_fused_emb_seq_pool_op.py
|
python/paddle/fluid/tests/unittests/test_fused_emb_seq_pool_op.py
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
import paddle.fluid as fluid
from paddle.fluid.op import Operator
import paddle.compat as cpt
class TestFusedEmbeddingSeqPoolOp(OpTest):
def setUp(self):
self.op_type = "fused_embedding_seq_pool"
self.emb_size = 2
table = np.random.random((17, self.emb_size)).astype("float32")
ids = np.array([[[4], [3]], [[4], [3]], [[2], [1]],
[[16], [1]]]).astype("int64")
merged_ids = np.array([4, 2, 16]).astype("int64")
ids_expand = np.expand_dims(ids, axis=1)
self.lod = [[3, 1]]
self.attrs = {'is_sparse': True}
self.inputs = {'W': table, 'Ids': (ids_expand, self.lod)}
self.outputs = {
'Out': np.reshape(
np.array([
table[[4, 3]] + table[[4, 3]] + table[[2, 1]],
table[[16, 1]]
]), [len(self.lod[0]), 2 * self.emb_size])
}
def test_check_output(self):
self.check_output()
if __name__ == "__main__":
unittest.main()
|
Add ut for fused ops
|
Add ut for fused ops
|
Python
|
apache-2.0
|
chengduoZH/Paddle,baidu/Paddle,tensor-tang/Paddle,chengduoZH/Paddle,baidu/Paddle,baidu/Paddle,luotao1/Paddle,baidu/Paddle,luotao1/Paddle,baidu/Paddle,luotao1/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,chengduoZH/Paddle,PaddlePaddle/Paddle,tensor-tang/Paddle,luotao1/Paddle,chengduoZH/Paddle,PaddlePaddle/Paddle,tensor-tang/Paddle,PaddlePaddle/Paddle,chengduoZH/Paddle,PaddlePaddle/Paddle,tensor-tang/Paddle,tensor-tang/Paddle,luotao1/Paddle
|
Add ut for fused ops
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
import paddle.fluid as fluid
from paddle.fluid.op import Operator
import paddle.compat as cpt
class TestFusedEmbeddingSeqPoolOp(OpTest):
def setUp(self):
self.op_type = "fused_embedding_seq_pool"
self.emb_size = 2
table = np.random.random((17, self.emb_size)).astype("float32")
ids = np.array([[[4], [3]], [[4], [3]], [[2], [1]],
[[16], [1]]]).astype("int64")
merged_ids = np.array([4, 2, 16]).astype("int64")
ids_expand = np.expand_dims(ids, axis=1)
self.lod = [[3, 1]]
self.attrs = {'is_sparse': True}
self.inputs = {'W': table, 'Ids': (ids_expand, self.lod)}
self.outputs = {
'Out': np.reshape(
np.array([
table[[4, 3]] + table[[4, 3]] + table[[2, 1]],
table[[16, 1]]
]), [len(self.lod[0]), 2 * self.emb_size])
}
def test_check_output(self):
self.check_output()
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add ut for fused ops<commit_after>
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
import paddle.fluid as fluid
from paddle.fluid.op import Operator
import paddle.compat as cpt
class TestFusedEmbeddingSeqPoolOp(OpTest):
def setUp(self):
self.op_type = "fused_embedding_seq_pool"
self.emb_size = 2
table = np.random.random((17, self.emb_size)).astype("float32")
ids = np.array([[[4], [3]], [[4], [3]], [[2], [1]],
[[16], [1]]]).astype("int64")
merged_ids = np.array([4, 2, 16]).astype("int64")
ids_expand = np.expand_dims(ids, axis=1)
self.lod = [[3, 1]]
self.attrs = {'is_sparse': True}
self.inputs = {'W': table, 'Ids': (ids_expand, self.lod)}
self.outputs = {
'Out': np.reshape(
np.array([
table[[4, 3]] + table[[4, 3]] + table[[2, 1]],
table[[16, 1]]
]), [len(self.lod[0]), 2 * self.emb_size])
}
def test_check_output(self):
self.check_output()
if __name__ == "__main__":
unittest.main()
|
Add ut for fused ops# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
import paddle.fluid as fluid
from paddle.fluid.op import Operator
import paddle.compat as cpt
class TestFusedEmbeddingSeqPoolOp(OpTest):
def setUp(self):
self.op_type = "fused_embedding_seq_pool"
self.emb_size = 2
table = np.random.random((17, self.emb_size)).astype("float32")
ids = np.array([[[4], [3]], [[4], [3]], [[2], [1]],
[[16], [1]]]).astype("int64")
merged_ids = np.array([4, 2, 16]).astype("int64")
ids_expand = np.expand_dims(ids, axis=1)
self.lod = [[3, 1]]
self.attrs = {'is_sparse': True}
self.inputs = {'W': table, 'Ids': (ids_expand, self.lod)}
self.outputs = {
'Out': np.reshape(
np.array([
table[[4, 3]] + table[[4, 3]] + table[[2, 1]],
table[[16, 1]]
]), [len(self.lod[0]), 2 * self.emb_size])
}
def test_check_output(self):
self.check_output()
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add ut for fused ops<commit_after># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
import paddle.fluid as fluid
from paddle.fluid.op import Operator
import paddle.compat as cpt
class TestFusedEmbeddingSeqPoolOp(OpTest):
def setUp(self):
self.op_type = "fused_embedding_seq_pool"
self.emb_size = 2
table = np.random.random((17, self.emb_size)).astype("float32")
ids = np.array([[[4], [3]], [[4], [3]], [[2], [1]],
[[16], [1]]]).astype("int64")
merged_ids = np.array([4, 2, 16]).astype("int64")
ids_expand = np.expand_dims(ids, axis=1)
self.lod = [[3, 1]]
self.attrs = {'is_sparse': True}
self.inputs = {'W': table, 'Ids': (ids_expand, self.lod)}
self.outputs = {
'Out': np.reshape(
np.array([
table[[4, 3]] + table[[4, 3]] + table[[2, 1]],
table[[16, 1]]
]), [len(self.lod[0]), 2 * self.emb_size])
}
def test_check_output(self):
self.check_output()
if __name__ == "__main__":
unittest.main()
|
|
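A hedged walkthrough of the expected output in the test above: the LoD [[3, 1]] splits the four id pairs into a sequence of three followed by a sequence of one, and sum pooling adds the looked-up embedding rows within each sequence before the reshape to (num_sequences, 2 * emb_size).

import numpy as np

table = np.random.random((17, 2)).astype("float32")
seq1 = table[[4, 3]] + table[[4, 3]] + table[[2, 1]]  # pooled rows, sequence 1
seq2 = table[[16, 1]]                                 # pooled rows, sequence 2
out = np.reshape(np.array([seq1, seq2]), (2, 4))      # matches self.outputs['Out']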
a852de81afdf8426cb243115a87856e2767a8d40
|
tests/benchmarks/constructs/InplaceOperationStringAdd.py
|
tests/benchmarks/constructs/InplaceOperationStringAdd.py
|
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5
module_value2 = 3
def calledRepeatedly():
# Force frame and eliminate forward propagation (currently).
module_value1
# Make sure we have a local variable x anyway
s = "2"
additiv = "*" * 1000
local_value = module_value1
for x in range(local_value, local_value+15):
# construct_begin
s += additiv
# construct_end
pass
for x in xrange(50000):
calledRepeatedly()
print("OK.")
|
Add construct test for known bad inplace string operations.
|
Tests: Add construct test for known bad inplace string operations.
|
Python
|
apache-2.0
|
kayhayen/Nuitka,tempbottle/Nuitka,tempbottle/Nuitka,tempbottle/Nuitka,tempbottle/Nuitka,kayhayen/Nuitka,kayhayen/Nuitka,wfxiang08/Nuitka,wfxiang08/Nuitka,wfxiang08/Nuitka,kayhayen/Nuitka,wfxiang08/Nuitka
|
Tests: Add construct test for known bad inplace string operations.
|
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5
module_value2 = 3
def calledRepeatedly():
# Force frame and eliminate forward propagation (currently).
module_value1
# Make sure we have a local variable x anyway
s = "2"
additiv = "*" * 1000
local_value = module_value1
for x in range(local_value, local_value+15):
# construct_begin
s += additiv
# construct_end
pass
for x in xrange(50000):
calledRepeatedly()
print("OK.")
|
<commit_before><commit_msg>Tests: Add construct test for known bad inplace string operations.<commit_after>
|
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5
module_value2 = 3
def calledRepeatedly():
# Force frame and eliminate forward propagation (currently).
module_value1
# Make sure we have a local variable x anyway
s = "2"
additiv = "*" * 1000
local_value = module_value1
for x in range(local_value, local_value+15):
# construct_begin
s += additiv
# construct_end
pass
for x in xrange(50000):
calledRepeatedly()
print("OK.")
|
Tests: Add construct test for known bad inplace string operations.# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5
module_value2 = 3
def calledRepeatedly():
# Force frame and eliminate forward propagation (currently).
module_value1
# Make sure we have a local variable x anyway
s = "2"
additiv = "*" * 1000
local_value = module_value1
for x in range(local_value, local_value+15):
# construct_begin
s += additiv
# construct_end
pass
for x in xrange(50000):
calledRepeatedly()
print("OK.")
|
<commit_before><commit_msg>Tests: Add construct test for known bad inplace string operations.<commit_after># Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module_value1 = 5
module_value2 = 3
def calledRepeatedly():
# Force frame and eliminate forward propagation (currently).
module_value1
# Make sure we have a local variable x anyway
s = "2"
additiv = "*" * 1000
local_value = module_value1
for x in range(local_value, local_value+15):
# construct_begin
s += additiv
# construct_end
pass
for x in xrange(50000):
calledRepeatedly()
print("OK.")
|
|
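Context for why the construct above is "known bad" (hedged: this is general CPython behaviour, not anything Nuitka-specific): str is immutable, so each s += additiv may copy the entire accumulated string, giving quadratic work over the loop. The usual counter-idiom collects the parts and joins once:

parts = ["2"]
for _ in range(15):
    parts.append("*" * 1000)
s = "".join(parts)  # one allocation instead of repeated copies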
b260040bc3ca48b4e76d73c6efe60b964fa5c108
|
tests/UnreachableSymbolsRemove/RemovingTerminalsTest.py
|
tests/UnreachableSymbolsRemove/RemovingTerminalsTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.08.2017 14:23
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class E(Nonterminal): pass
class F(Nonterminal): pass
class RuleAto0B(Rule): rule = ([A], [0, B])
class RuleBto1C(Rule): rule = ([B], [1, C])
class RuleCto2C(Rule): rule = ([C], [2, C])
class RemovingTerminalsTest(TestCase):
def test_removingTerminals(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
com = ContextFree.remove_unreachable_symbols(g)
self.assertTrue(com.have_term([0, 1, 2]))
self.assertFalse(com.have_term(3))
self.assertTrue(com.have_nonterm([A, B, C]))
self.assertFalse(com.have_nonterm(D))
self.assertFalse(com.have_nonterm(E))
self.assertFalse(com.have_nonterm(F))
def test_removingTerminalsShouldNotChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g)
self.assertTrue(g.have_term([0, 1, 2, 3]))
self.assertTrue(g.have_nonterm([A, B, C, D, E, F]))
def test_removingTerminalsShouldChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term([0, 1, 2]))
self.assertFalse(g.have_term(3))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
self.assertFalse(g.have_nonterm(E))
self.assertFalse(g.have_nonterm(F))
if __name__ == '__main__':
main()
|
Add test of removing unreachable terminals
|
Add test of removing unreachable terminals
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add test of removing unreachable terminals
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.08.2017 14:23
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class E(Nonterminal): pass
class F(Nonterminal): pass
class RuleAto0B(Rule): rule = ([A], [0, B])
class RuleBto1C(Rule): rule = ([B], [1, C])
class RuleCto2C(Rule): rule = ([C], [2, C])
class RemovingTerminalsTest(TestCase):
def test_removingTerminals(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
com = ContextFree.remove_unreachable_symbols(g)
self.assertTrue(com.have_term([0, 1, 2]))
self.assertFalse(com.have_term(3))
self.assertTrue(com.have_nonterm([A, B, C]))
self.assertFalse(com.have_nonterm(D))
self.assertFalse(com.have_nonterm(E))
self.assertFalse(com.have_nonterm(F))
def test_removingTerminalsShouldNotChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g)
self.assertTrue(g.have_term([0, 1, 2, 3]))
self.assertTrue(g.have_nonterm([A, B, C, D, E, F]))
def test_removingTerminalsShouldChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term([0, 1, 2]))
self.assertFalse(g.have_term(3))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
self.assertFalse(g.have_nonterm(E))
self.assertFalse(g.have_nonterm(F))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add test of removing unreachable terminals<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.08.2017 14:23
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class E(Nonterminal): pass
class F(Nonterminal): pass
class RuleAto0B(Rule): rule = ([A], [0, B])
class RuleBto1C(Rule): rule = ([B], [1, C])
class RuleCto2C(Rule): rule = ([C], [2, C])
class RemovingTerminalsTest(TestCase):
def test_removingTerminals(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
com = ContextFree.remove_unreachable_symbols(g)
self.assertTrue(com.have_term([0, 1, 2]))
self.assertFalse(com.have_term(3))
self.assertTrue(com.have_nonterm([A, B, C]))
self.assertFalse(com.have_nonterm(D))
self.assertFalse(com.have_nonterm(E))
self.assertFalse(com.have_nonterm(F))
def test_removingTerminalsShouldNotChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g)
self.assertTrue(g.have_term([0, 1, 2, 3]))
self.assertTrue(g.have_nonterm([A, B, C, D, E, F]))
def test_removingTerminalsShouldChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term([0, 1, 2]))
self.assertFalse(g.have_term(3))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
self.assertFalse(g.have_nonterm(E))
self.assertFalse(g.have_nonterm(F))
if __name__ == '__main__':
main()
|
Add test of removing unreachable terminals#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.08.2017 14:23
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class E(Nonterminal): pass
class F(Nonterminal): pass
class RuleAto0B(Rule): rule = ([A], [0, B])
class RuleBto1C(Rule): rule = ([B], [1, C])
class RuleCto2C(Rule): rule = ([C], [2, C])
class RemovingTerminalsTest(TestCase):
def test_removingTerminals(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
com = ContextFree.remove_unreachable_symbols(g)
self.assertTrue(com.have_term([0, 1, 2]))
self.assertFalse(com.have_term(3))
self.assertTrue(com.have_nonterm([A, B, C]))
self.assertFalse(com.have_nonterm(D))
self.assertFalse(com.have_nonterm(E))
self.assertFalse(com.have_nonterm(F))
def test_removingTerminalsShouldNotChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g)
self.assertTrue(g.have_term([0, 1, 2, 3]))
self.assertTrue(g.have_nonterm([A, B, C, D, E, F]))
def test_removingTerminalsShouldChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term([0, 1, 2]))
self.assertFalse(g.have_term(3))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
self.assertFalse(g.have_nonterm(E))
self.assertFalse(g.have_nonterm(F))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add test of removing unreachable terminals<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.08.2017 14:23
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class E(Nonterminal): pass
class F(Nonterminal): pass
class RuleAto0B(Rule): rule = ([A], [0, B])
class RuleBto1C(Rule): rule = ([B], [1, C])
class RuleCto2C(Rule): rule = ([C], [2, C])
class RemovingTerminalsTest(TestCase):
def test_removingTerminals(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
com = ContextFree.remove_unreachable_symbols(g)
self.assertTrue(com.have_term([0, 1, 2]))
self.assertFalse(com.have_term(3))
self.assertTrue(com.have_nonterm([A, B, C]))
self.assertFalse(com.have_nonterm(D))
self.assertFalse(com.have_nonterm(E))
self.assertFalse(com.have_nonterm(F))
def test_removingTerminalsShouldNotChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g)
self.assertTrue(g.have_term([0, 1, 2, 3]))
self.assertTrue(g.have_nonterm([A, B, C, D, E, F]))
def test_removingTerminalsShouldChange(self):
g = Grammar(terminals=[0, 1, 2, 3],
nonterminals=[A, B, C, D, E, F],
rules=[RuleAto0B, RuleBto1C, RuleCto2C],
start_symbol=A)
ContextFree.remove_unreachable_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term([0, 1, 2]))
self.assertFalse(g.have_term(3))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
self.assertFalse(g.have_nonterm(E))
self.assertFalse(g.have_nonterm(F))
if __name__ == '__main__':
main()
|
|
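A hedged sketch of the reachability the tests above assert: starting from A, the only applicable rules are A -> 0 B, B -> 1 C and C -> 2 C, so a simple closure over right-hand sides yields exactly the expected sets; the dict below is a symbolic stand-in for the Rule classes.

rules = {'A': ['0', 'B'], 'B': ['1', 'C'], 'C': ['2', 'C']}
reachable, frontier = {'A'}, ['A']
while frontier:
    for sym in rules.get(frontier.pop(), []):
        if sym not in reachable:
            reachable.add(sym)
            frontier.append(sym)
print(sorted(reachable))  # ['0', '1', '2', 'A', 'B', 'C'] -- D, E, F and 3 absent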
0ac0c81a3427f35447f52c1643229f5dbe607002
|
osf/migrations/0099_merge_20180426_0930.py
|
osf/migrations/0099_merge_20180426_0930.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-26 14:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0098_merge_20180416_1807'),
('osf', '0096_add_provider_doi_prefixes'),
]
operations = [
]
|
Add a merge migration and bring up to date
|
Add a merge migration and bring up to date
|
Python
|
apache-2.0
|
mfraezz/osf.io,erinspace/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,adlius/osf.io,erinspace/osf.io,cslzchen/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,icereval/osf.io,mfraezz/osf.io,icereval/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,icereval/osf.io,saradbowman/osf.io,adlius/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,cslzchen/osf.io,felliott/osf.io,mfraezz/osf.io,felliott/osf.io,adlius/osf.io,aaxelb/osf.io,cslzchen/osf.io,pattisdr/osf.io,sloria/osf.io,brianjgeiger/osf.io,felliott/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,baylee-d/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,felliott/osf.io,pattisdr/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,brianjgeiger/osf.io,erinspace/osf.io,mfraezz/osf.io,cslzchen/osf.io,caseyrollins/osf.io,pattisdr/osf.io,sloria/osf.io,sloria/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,mattclark/osf.io
|
Add a merge migration and bring up to date
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-26 14:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0098_merge_20180416_1807'),
('osf', '0096_add_provider_doi_prefixes'),
]
operations = [
]
|
<commit_before><commit_msg>Add a merge migration and bring up to date<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-26 14:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0098_merge_20180416_1807'),
('osf', '0096_add_provider_doi_prefixes'),
]
operations = [
]
|
Add a merge migration and bring up to date# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-26 14:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0098_merge_20180416_1807'),
('osf', '0096_add_provider_doi_prefixes'),
]
operations = [
]
|
<commit_before><commit_msg>Add a merge migration and bring up to date<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-26 14:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0098_merge_20180416_1807'),
('osf', '0096_add_provider_doi_prefixes'),
]
operations = [
]
|
|
28696b671a5f80f781c67f35ae5abb30efd6379c
|
solutions/uri/1019/1019.py
|
solutions/uri/1019/1019.py
|
import sys
h = 0
m = 0
for t in sys.stdin:
t = int(t)
if t >= 60 * 60:
h = t // (60 * 60)
t %= 60 * 60
if t >= 60:
m = t // 60
t %= 60
print(f"{h}:{m}:{t}")
h = 0
m = 0
|
Solve Time Conversion in Python
|
Solve Time Conversion in Python
|
Python
|
mit
|
deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground
|
Solve Time Conversion in Python
|
import sys
h = 0
m = 0
for t in sys.stdin:
t = int(t)
if t >= 60 * 60:
h = t // (60 * 60)
t %= 60 * 60
if t >= 60:
m = t // 60
t %= 60
print(f"{h}:{m}:{t}")
h = 0
m = 0
|
<commit_before><commit_msg>Solve Time Conversion in Python<commit_after>
|
import sys
h = 0
m = 0
for t in sys.stdin:
t = int(t)
if t >= 60 * 60:
h = t // (60 * 60)
t %= 60 * 60
if t >= 60:
m = t // 60
t %= 60
print(f"{h}:{m}:{t}")
h = 0
m = 0
|
Solve Time Conversion in Pythonimport sys
h = 0
m = 0
for t in sys.stdin:
t = int(t)
if t >= 60 * 60:
h = t // (60 * 60)
t %= 60 * 60
if t >= 60:
m = t // 60
t %= 60
print(f"{h}:{m}:{t}")
h = 0
m = 0
|
<commit_before><commit_msg>Solve Time Conversion in Python<commit_after>import sys
h = 0
m = 0
for t in sys.stdin:
t = int(t)
if t >= 60 * 60:
h = t // (60 * 60)
t %= 60 * 60
if t >= 60:
m = t // 60
t %= 60
print(f"{h}:{m}:{t}")
h = 0
m = 0
|
|
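A hedged equivalent of the conversion above using divmod, which also avoids having to reset h and m between input lines while keeping the same unpadded H:M:S format:

import sys

for line in sys.stdin:
    t = int(line)
    h, rem = divmod(t, 3600)  # whole hours, leftover seconds
    m, s = divmod(rem, 60)    # whole minutes, leftover seconds
    print(f"{h}:{m}:{s}")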
69005d995aa0e6d291216101253197c6b2d8260a
|
husc/main.py
|
husc/main.py
|
import argparse
parser = argparse.ArgumentParser(description="Run the HUSC functions.")
subpar = parser.add_subparsers()
stitch = subpar.add_parser('stitch',
help="Stitch four quadrants into one image.")
stitch.add_argument('quadrant_image', nargs=4,
help="The images for each quadrant in order: NW, NE, " +
"SW, SE.")
stitch.add_argument('output_image',
help="The filename for the stitched image.")
illum = subpar.add_parser('illum',
help="Estimate and correct illumination.")
illum.add_argument('images', nargs='+',
help="The input images.")
illum.add_argument('-o', '--output-suffix',
default='.illum.tif', metavar='SUFFIX',
help="What suffix to attach to the corrected images.")
def main():
"""Fetch commands from the command line."""
args = parser.parse_args()
print args
if __name__ == '__main__':
main()
|
Add module for command-line interface
|
Add module for command-line interface
|
Python
|
bsd-3-clause
|
starcalibre/microscopium,jni/microscopium,microscopium/microscopium,microscopium/microscopium,Don86/microscopium,jni/microscopium,Don86/microscopium
|
Add module for command-line interface
|
import argparse
parser = argparse.ArgumentParser(description="Run the HUSC functions.")
subpar = parser.add_subparsers()
stitch = subpar.add_parser('stitch',
help="Stitch four quadrants into one image.")
stitch.add_argument('quadrant_image', nargs=4,
help="The images for each quadrant in order: NW, NE, " +
"SW, SE.")
stitch.add_argument('output_image',
help="The filename for the stitched image.")
illum = subpar.add_parser('illum',
help="Estimate and correct illumination.")
illum.add_argument('images', nargs='+',
help="The input images.")
illum.add_argument('-o', '--output-suffix',
default='.illum.tif', metavar='SUFFIX',
help="What suffix to attach to the corrected images.")
def main():
"""Fetch commands from the command line."""
args = parser.parse_args()
print args
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add module for command-line interface<commit_after>
|
import argparse
parser = argparse.ArgumentParser(description="Run the HUSC functions.")
subpar = parser.add_subparsers()
stitch = subpar.add_parser('stitch',
help="Stitch four quadrants into one image.")
stitch.add_argument('quadrant_image', nargs=4,
help="The images for each quadrant in order: NW, NE, " +
"SW, SE.")
stitch.add_argument('output_image',
help="The filename for the stitched image.")
illum = subpar.add_parser('illum',
help="Estimate and correct illumination.")
illum.add_argument('images', nargs='+',
help="The input images.")
illum.add_argument('-o', '--output-suffix',
default='.illum.tif', metavar='SUFFIX',
help="What suffix to attach to the corrected images.")
def main():
"""Fetch commands from the command line."""
args = parser.parse_args()
print args
if __name__ == '__main__':
main()
|
Add module for command-line interfaceimport argparse
parser = argparse.ArgumentParser(description="Run the HUSC functions.")
subpar = parser.add_subparsers()
stitch = subpar.add_parser('stitch',
help="Stitch four quadrants into one image.")
stitch.add_argument('quadrant_image', nargs=4,
help="The images for each quadrant in order: NW, NE, " +
"SW, SE.")
stitch.add_argument('output_image',
help="The filename for the stitched image.")
illum = subpar.add_parser('illum',
help="Estimate and correct illumination.")
illum.add_argument('images', nargs='+',
help="The input images.")
illum.add_argument('-o', '--output-suffix',
default='.illum.tif', metavar='SUFFIX',
help="What suffix to attach to the corrected images.")
def main():
"""Fetch commands from the command line."""
args = parser.parse_args()
print args
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add module for command-line interface<commit_after>import argparse
parser = argparse.ArgumentParser(description="Run the HUSC functions.")
subpar = parser.add_subparsers()
stitch = subpar.add_parser('stitch',
help="Stitch four quadrants into one image.")
stitch.add_argument('quadrant_image', nargs=4,
help="The images for each quadrant in order: NW, NE, " +
"SW, SE.")
stitch.add_argument('output_image',
help="The filename for the stitched image.")
illum = subpar.add_parser('illum',
help="Estimate and correct illumination.")
illum.add_argument('images', nargs='+',
help="The input images.")
illum.add_argument('-o', '--output-suffix',
default='.illum.tif', metavar='SUFFIX',
help="What suffix to attach to the corrected images.")
def main():
"""Fetch commands from the command line."""
args = parser.parse_args()
print args
if __name__ == '__main__':
main()
|
|
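Note that `print args` in main() above is Python 2 syntax (this is apparently Python 2-era code); on Python 3 it is a syntax error. A hypothetical Python 3 port of just that function, with an invocation sketch (the file names are made up):

def main():
    """Fetch commands from the command line."""
    args = parser.parse_args()
    print(args)  # print() function instead of the Python 2 print statement

# usage sketch (hypothetical file names):
#   python main.py stitch nw.tif ne.tif sw.tif se.tif stitched.tif
#   python main.py illum well1.tif well2.tif -o .illum.tif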
ea0087970b0c0adfd8942123899ff0ec231afa03
|
test/selenium/src/lib/page/extended_info.py
|
test/selenium/src/lib/page/extended_info.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
_locator = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.button_map = None
def _reload_contents(self):
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
def map_to_object(self):
try:
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
self.button_map.click()
except exceptions.StaleElementReferenceException:
self._reload_contents()
return self.map_to_object()
def is_already_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self._locator.ALREADY_MAPPED)
return True
except exceptions.NoSuchElementException:
return False
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
from lib.utils import selenium_utils
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
locator_cls = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.is_mapped = None
self.button_map = None
self.title = base.Label(driver, self.locator_cls.TITLE)
self._set_is_mapped()
def map_to_object(self):
selenium_utils.click_on_staleable_element(
self._driver,
self.locator_cls.BUTTON_MAP_TO)
self.is_mapped = True
def _set_is_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self.locator_cls.ALREADY_MAPPED)
self.is_mapped = True
except exceptions.NoSuchElementException:
self.is_mapped = False
|
Handle staleable element with utils
|
Handle staleable element with utils
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
_locator = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.button_map = None
def _reload_contents(self):
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
def map_to_object(self):
try:
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
self.button_map.click()
except exceptions.StaleElementReferenceException:
self._reload_contents()
return self.map_to_object()
def is_already_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self._locator.ALREADY_MAPPED)
return True
except exceptions.NoSuchElementException:
return False
Handle staleable element with utils
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
from lib.utils import selenium_utils
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
locator_cls = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.is_mapped = None
self.button_map = None
self.title = base.Label(driver, self.locator_cls.TITLE)
self._set_is_mapped()
def map_to_object(self):
selenium_utils.click_on_staleable_element(
self._driver,
self.locator_cls.BUTTON_MAP_TO)
self.is_mapped = True
def _set_is_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self.locator_cls.ALREADY_MAPPED)
self.is_mapped = True
except exceptions.NoSuchElementException:
self.is_mapped = False
|
<commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
_locator = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.button_map = None
def _reload_contents(self):
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
def map_to_object(self):
try:
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
self.button_map.click()
except exceptions.StaleElementReferenceException:
self._reload_contents()
return self.map_to_object()
def is_already_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self._locator.ALREADY_MAPPED)
return True
except exceptions.NoSuchElementException:
return False
<commit_msg>Handle staleable element with utils<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
from lib.utils import selenium_utils
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
locator_cls = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.is_mapped = None
self.button_map = None
self.title = base.Label(driver, self.locator_cls.TITLE)
self._set_is_mapped()
def map_to_object(self):
selenium_utils.click_on_staleable_element(
self._driver,
self.locator_cls.BUTTON_MAP_TO)
self.is_mapped = True
def _set_is_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self.locator_cls.ALREADY_MAPPED)
self.is_mapped = True
except exceptions.NoSuchElementException:
self.is_mapped = False
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
_locator = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.button_map = None
def _reload_contents(self):
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
def map_to_object(self):
try:
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
self.button_map.click()
except exceptions.StaleElementReferenceException:
self._reload_contents()
return self.map_to_object()
def is_already_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self._locator.ALREADY_MAPPED)
return True
except exceptions.NoSuchElementException:
return False
Handle staleable element with utils# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
from lib.utils import selenium_utils
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
locator_cls = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.is_mapped = None
self.button_map = None
self.title = base.Label(driver, self.locator_cls.TITLE)
self._set_is_mapped()
def map_to_object(self):
selenium_utils.click_on_staleable_element(
self._driver,
self.locator_cls.BUTTON_MAP_TO)
self.is_mapped = True
def _set_is_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self.locator_cls.ALREADY_MAPPED)
self.is_mapped = True
except exceptions.NoSuchElementException:
self.is_mapped = False
|
<commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
_locator = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.button_map = None
def _reload_contents(self):
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
def map_to_object(self):
try:
self.button_map = base.Button(
self._driver, self._locator.BUTTON_MAP_TO)
self.button_map.click()
except exceptions.StaleElementReferenceException:
self._reload_contents()
return self.map_to_object()
def is_already_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self._locator.ALREADY_MAPPED)
return True
except exceptions.NoSuchElementException:
return False
<commit_msg>Handle staleable element with utils<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: jernej@reciprocitylabs.com
# Maintained By: jernej@reciprocitylabs.com
"""A module for extended info page models (visible in LHN on hover over
object members)"""
from selenium.common import exceptions
from lib import base
from lib.constants import locator
from lib.utils import selenium_utils
class ExtendedInfo(base.Component):
"""Model representing an extended info box that allows the object to be
mapped"""
locator_cls = locator.ExtendedInfo
def __init__(self, driver):
super(ExtendedInfo, self).__init__(driver)
self.is_mapped = None
self.button_map = None
self.title = base.Label(driver, self.locator_cls.TITLE)
self._set_is_mapped()
def map_to_object(self):
selenium_utils.click_on_staleable_element(
self._driver,
self.locator_cls.BUTTON_MAP_TO)
self.is_mapped = True
def _set_is_mapped(self):
"""Checks if the object is already mapped"""
try:
self._driver.find_element(*self.locator_cls.ALREADY_MAPPED)
self.is_mapped = True
except exceptions.NoSuchElementException:
self.is_mapped = False
|
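The refactor above moves the retry logic into selenium_utils.click_on_staleable_element, whose body is not shown in this record. A plausible sketch of such a helper (an assumption about its shape, not the project's actual implementation):

from selenium.common import exceptions

def click_on_staleable_element(driver, element_locator, max_attempts=5):
    """Click an element, re-finding it if the DOM re-renders it mid-click."""
    for _ in range(max_attempts):
        try:
            driver.find_element(*element_locator).click()
            return
        except exceptions.StaleElementReferenceException:
            continue  # element went stale; look it up again
    raise exceptions.StaleElementReferenceException(
        'element still stale after %d attempts' % max_attempts)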
7922b24882894cbc83bd4247c11d8c4a66b4b218
|
_setup_database.py
|
_setup_database.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
|
Add utility script for database setup
|
Add utility script for database setup
|
Python
|
mit
|
leaffan/pynhldb
|
Add utility script for database setup
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
|
<commit_before><commit_msg>Add utility script for database setup<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
|
Add utility script for database setup#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
|
<commit_before><commit_msg>Add utility script for database setup<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
|
|
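Both setup helpers accept a simulation flag whose implementation is not part of this record. A common shape for such a dry-run switch, purely illustrative (the loader and writer below are stand-ins, not the project's functions):

def migrate_teams(simulation=False):
    teams = load_teams_from_json()  # stand-in for the real JSON loader
    for team in teams:
        if simulation:
            print('would insert: %s' % team)  # report instead of writing
        else:
            insert_team(team)  # stand-in for the real database write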
93621f9441af4df77c8364050d7cc3dc2b1b43b2
|
tests/functional/registration/test_check.py
|
tests/functional/registration/test_check.py
|
"""
Test check/validation command.
"""
import os
import subprocess
this_folder = os.path.abspath(os.path.dirname(__file__))
def test_check_metamodel():
"""
Meta-model is also a model
"""
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'flow_dsl', 'Flow.tx')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'Flow.tx: OK.' in output
def test_check_valid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'data_flow.eflow: OK.' in output
def test_check_invalid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow_including_error.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'error: types must be lowercase' in output
|
Add tests for `check` command
|
Add tests for `check` command
|
Python
|
mit
|
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
|
Add tests for `check` command
|
"""
Test check/validation command.
"""
import os
import subprocess
this_folder = os.path.abspath(os.path.dirname(__file__))
def test_check_metamodel():
"""
Meta-model is also a model
"""
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'flow_dsl', 'Flow.tx')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'Flow.tx: OK.' in output
def test_check_valid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'data_flow.eflow: OK.' in output
def test_check_invalid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow_including_error.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'error: types must be lowercase' in output
|
<commit_before><commit_msg>Add tests for `check` command<commit_after>
|
"""
Test check/validation command.
"""
import os
import subprocess
this_folder = os.path.abspath(os.path.dirname(__file__))
def test_check_metamodel():
"""
Meta-model is also a model
"""
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'flow_dsl', 'Flow.tx')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'Flow.tx: OK.' in output
def test_check_valid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'data_flow.eflow: OK.' in output
def test_check_invalid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow_including_error.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'error: types must be lowercase' in output
|
Add tests for `check` command"""
Test check/validation command.
"""
import os
import subprocess
this_folder = os.path.abspath(os.path.dirname(__file__))
def test_check_metamodel():
"""
Meta-model is also a model
"""
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'flow_dsl', 'Flow.tx')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'Flow.tx: OK.' in output
def test_check_valid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'data_flow.eflow: OK.' in output
def test_check_invalid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow_including_error.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'error: types must be lowercase' in output
|
<commit_before><commit_msg>Add tests for `check` command<commit_after>"""
Test check/validation command.
"""
import os
import subprocess
this_folder = os.path.abspath(os.path.dirname(__file__))
def test_check_metamodel():
"""
Meta-model is also a model
"""
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'flow_dsl', 'Flow.tx')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'Flow.tx: OK.' in output
def test_check_valid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'data_flow.eflow: OK.' in output
def test_check_invalid_model():
metamodel_file = os.path.join(this_folder,
'projects', 'flow_dsl', 'tests',
'models', 'data_flow_including_error.eflow')
output = subprocess.check_output(['textx', 'check', metamodel_file],
stderr=subprocess.STDOUT)
assert b'error: types must be lowercase' in output
|
|
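One subtlety in the tests above: subprocess.check_output raises CalledProcessError when the command exits nonzero, so the invalid-model test implicitly assumes `textx check` still exits 0 while printing the error. If the CLI exited nonzero instead, the output would have to be read off the exception, e.g. (model_file is a stand-in path):

import subprocess

try:
    output = subprocess.check_output(['textx', 'check', model_file],
                                     stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
    output = exc.output  # the CLI's message is preserved on the exception
assert b'error: types must be lowercase' in output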
474c5f977ab5b035567f0107c457622c51189ac6
|
csunplugged/topics/migrations/0086_auto_20171108_0840.py
|
csunplugged/topics/migrations/0086_auto_20171108_0840.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-08 08:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('topics', '0085_auto_20171030_0035'),
]
operations = [
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_de',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_en',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_fr',
field=models.CharField(max_length=200, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='classroomresource',
name='description_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='programmingchallengelanguage',
name='name',
field=models.CharField(max_length=200),
),
]
|
Add new topics migration file
|
Add new topics migration file
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
Add new topics migration file
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-08 08:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('topics', '0085_auto_20171030_0035'),
]
operations = [
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_de',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_en',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_fr',
field=models.CharField(max_length=200, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='classroomresource',
name='description_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='programmingchallengelanguage',
name='name',
field=models.CharField(max_length=200),
),
]
|
<commit_before><commit_msg>Add new topics migration file<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-08 08:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('topics', '0085_auto_20171030_0035'),
]
operations = [
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_de',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_en',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_fr',
field=models.CharField(max_length=200, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='classroomresource',
name='description_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='programmingchallengelanguage',
name='name',
field=models.CharField(max_length=200),
),
]
|
Add new topics migration file# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-08 08:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('topics', '0085_auto_20171030_0035'),
]
operations = [
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_de',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_en',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_fr',
field=models.CharField(max_length=200, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='classroomresource',
name='description_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='programmingchallengelanguage',
name='name',
field=models.CharField(max_length=200),
),
]
|
<commit_before><commit_msg>Add new topics migration file<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-08 08:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('topics', '0085_auto_20171030_0035'),
]
operations = [
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_de',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_en',
field=models.CharField(max_length=200, null=True),
),
migrations.AddField(
model_name='programmingchallengelanguage',
name='name_fr',
field=models.CharField(max_length=200, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='classroomresource',
name='description_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='classroomresource',
name='description_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_de',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_en',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='curriculumarea',
name='name_fr',
field=models.CharField(default='', max_length=100, null=True),
),
migrations.AlterField(
model_name='programmingchallengelanguage',
name='name',
field=models.CharField(max_length=200),
),
]
|
|
71afe426a84789b65953ccd057014d17a11de859
|
mzalendo/core/management/commands/core_extend_areas_to_generation_2.py
|
mzalendo/core/management/commands/core_extend_areas_to_generation_2.py
|
# The import of data into Kenyan MapIt had the constituencies in
# generation 2, while all the other area types were in generation 1.
# This is unfortunate since it makes it appear to later import scripts
# that the district type disappeared between generation 1 and 3.
#
# This script just extends the generation_high to generation 2 for
# every area where it was set to generation 1.
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
    help = 'Change all generation_high=1 to generation_high=2'
def handle_noargs(self, **options):
g1 = Generation.objects.get(id=1)
g2 = Generation.objects.get(id=2)
for area in Area.objects.filter(generation_high=g1):
area.generation_high = g2
area.save()
|
Add a command to extend the generation_high from generation 1 to 2
|
Add a command to extend the generation_high from generation 1 to 2
|
Python
|
agpl-3.0
|
mysociety/pombola,geoffkilpin/pombola,ken-muturi/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,mysociety/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola
|
Add a command to extend the generation_high from generation 1 to 2
|
# The import of data into Kenyan MapIt had the constituencies in
# generation 2, while all the other area types were in generation 1.
# This is unfortunate since it makes it appear to later import scripts
# that the district type disappeared between generation 1 and 3.
#
# This script just extends the generation_high to generation 2 for
# every area where it was set to generation 1.
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
    help = 'Change all generation_high=1 to generation_high=2'
def handle_noargs(self, **options):
g1 = Generation.objects.get(id=1)
g2 = Generation.objects.get(id=2)
for area in Area.objects.filter(generation_high=g1):
area.generation_high = g2
area.save()
|
<commit_before><commit_msg>Add a command to extend the generation_high from generation 1 to 2<commit_after>
|
# The import of data into Kenyan MapIt had the constituencies in
# generation 2, while all the other area types were in generation 1.
# This is unfortunate since it makes it appear to later import scripts
# that the district type disappeared between generation 1 and 3.
#
# This script just extends the generation_high to generation 2 for
# every area where it was set to generation 1.
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
    help = 'Change all generation_high=1 to generation_high=2'
def handle_noargs(self, **options):
g1 = Generation.objects.get(id=1)
g2 = Generation.objects.get(id=2)
for area in Area.objects.filter(generation_high=g1):
area.generation_high = g2
area.save()
|
Add a command to extend the generation_high from generation 1 to 2# The import of data into Kenyan MapIt had the constituencies in
# generation 2, while all the other area types were in generation 1.
# This is unfortunate since it makes it appear to later import scripts
# that the district type disappeared between generation 1 and 3.
#
# This script just extends the generation_high to generation 2 for
# every area where it was set to generation 1.
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Change all genertion_high=1 to generation_high=2'
def handle_noargs(self, **options):
g1 = Generation.objects.get(id=1)
g2 = Generation.objects.get(id=2)
for area in Area.objects.filter(generation_high=g1):
area.generation_high = g2
area.save()
|
<commit_before><commit_msg>Add a command to extend the generation_high from generation 1 to 2<commit_after># The import of data into Kenyan MapIt had the constituencies in
# generation 2, while all the other area types were in generation 1.
# This is unfortunate since it makes it appear to later import scripts
# that the district type disappeared between generation 1 and 3.
#
# This script just extends the generation_high to generation 2 for
# every area where it was set to generation 1.
from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation, Type, NameType, Country, CodeType
class Command(NoArgsCommand):
help = 'Change all genertion_high=1 to generation_high=2'
def handle_noargs(self, **options):
g1 = Generation.objects.get(id=1)
g2 = Generation.objects.get(id=2)
for area in Area.objects.filter(generation_high=g1):
area.generation_high = g2
area.save()
|
|
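The loop above issues one UPDATE per row via save(). A Django queryset can do this in a single statement; a hypothetical alternative (note that .update() bypasses Area.save() overrides and pre/post-save signals, which may be why the loop was chosen):

from mapit.models import Area, Generation

def extend_generation_high():
    g1 = Generation.objects.get(id=1)
    g2 = Generation.objects.get(id=2)
    # one UPDATE statement instead of one save() per area
    Area.objects.filter(generation_high=g1).update(generation_high=g2)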
b88b97c7d56506804fc9eb93ce7074454fc492f3
|
base/apps/people/migrations/0002_auto_20141223_0316.py
|
base/apps/people/migrations/0002_auto_20141223_0316.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Designation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=60)),
('romanized_name', models.CharField(max_length=60)),
('started', models.DateField(db_index=True)),
('ended', models.DateField(db_index=True, null=True, blank=True)),
('group', models.ForeignKey(related_name=b'designations', to='people.Group')),
],
options={
'get_latest_by': 'started',
},
bases=(models.Model,),
),
migrations.AlterOrderWithRespectTo(
name='designation',
order_with_respect_to='group',
),
]
|
Add the migration for designations.
|
Add the migration for designations.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
Add the migration for designations.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Designation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=60)),
('romanized_name', models.CharField(max_length=60)),
('started', models.DateField(db_index=True)),
('ended', models.DateField(db_index=True, null=True, blank=True)),
('group', models.ForeignKey(related_name=b'designations', to='people.Group')),
],
options={
'get_latest_by': 'started',
},
bases=(models.Model,),
),
migrations.AlterOrderWithRespectTo(
name='designation',
order_with_respect_to='group',
),
]
|
<commit_before><commit_msg>Add the migration for designations.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Designation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=60)),
('romanized_name', models.CharField(max_length=60)),
('started', models.DateField(db_index=True)),
('ended', models.DateField(db_index=True, null=True, blank=True)),
('group', models.ForeignKey(related_name=b'designations', to='people.Group')),
],
options={
'get_latest_by': 'started',
},
bases=(models.Model,),
),
migrations.AlterOrderWithRespectTo(
name='designation',
order_with_respect_to='group',
),
]
|
Add the migration for designations.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Designation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=60)),
('romanized_name', models.CharField(max_length=60)),
('started', models.DateField(db_index=True)),
('ended', models.DateField(db_index=True, null=True, blank=True)),
('group', models.ForeignKey(related_name=b'designations', to='people.Group')),
],
options={
'get_latest_by': 'started',
},
bases=(models.Model,),
),
migrations.AlterOrderWithRespectTo(
name='designation',
order_with_respect_to='group',
),
]
|
<commit_before><commit_msg>Add the migration for designations.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Designation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=60)),
('romanized_name', models.CharField(max_length=60)),
('started', models.DateField(db_index=True)),
('ended', models.DateField(db_index=True, null=True, blank=True)),
('group', models.ForeignKey(related_name=b'designations', to='people.Group')),
],
options={
'get_latest_by': 'started',
},
bases=(models.Model,),
),
migrations.AlterOrderWithRespectTo(
name='designation',
order_with_respect_to='group',
),
]
|
|
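The related_name=b'designations' bytes literal above is an artifact of generating migrations under Python 2; it works there, but modern Django expects a plain string (and, from Django 2.0 on, an explicit on_delete). An updated form of that field, for illustration only:

from django.db import models

group = models.ForeignKey('people.Group', on_delete=models.CASCADE,
                          related_name='designations')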
0cabf3c4dae3599e2d1627ff41707cf36b4d2ddd
|
acoustics/__init__.py
|
acoustics/__init__.py
|
"""
Acoustics
=========
The acoustics module...
"""
import acoustics.ambisonics
import acoustics.utils
import acoustics.octave
import acoustics.doppler
import acoustics.signal
import acoustics.directivity
import acoustics.building
import acoustics.room
import acoustics.standards
import acoustics.cepstrum
from acoustics._signal import Signal
|
"""
Acoustics
=========
The acoustics module...
"""
import acoustics.aio
import acoustics.ambisonics
import acoustics.atmosphere
import acoustics.bands
import acoustics.building
import acoustics.cepstrum
import acoustics.criterion
import acoustics.decibel
import acoustics.descriptors
import acoustics.directivity
import acoustics.doppler
import acoustics.generator
import acoustics.imaging
import acoustics.octave
import acoustics.power
import acoustics.quantity
import acoustics.reflection
import acoustics.room
import acoustics.signal
import acoustics.turbulence
#import acoustics.utils
import acoustics.weighting
from acoustics._signal import Signal
|
Load all modules by default
|
Load all modules by default
|
Python
|
bsd-3-clause
|
python-acoustics/python-acoustics,antiface/python-acoustics,FRidh/python-acoustics,felipeacsi/python-acoustics,giumas/python-acoustics
|
"""
Acoustics
=========
The acoustics module...
"""
import acoustics.ambisonics
import acoustics.utils
import acoustics.octave
import acoustics.doppler
import acoustics.signal
import acoustics.directivity
import acoustics.building
import acoustics.room
import acoustics.standards
import acoustics.cepstrum
from acoustics._signal import Signal
Load all modules by default
|
"""
Acoustics
=========
The acoustics module...
"""
import acoustics.aio
import acoustics.ambisonics
import acoustics.atmosphere
import acoustics.bands
import acoustics.building
import acoustics.cepstrum
import acoustics.criterion
import acoustics.decibel
import acoustics.descriptors
import acoustics.directivity
import acoustics.doppler
import acoustics.generator
import acoustics.imaging
import acoustics.octave
import acoustics.power
import acoustics.quantity
import acoustics.reflection
import acoustics.room
import acoustics.signal
import acoustics.turbulence
#import acoustics.utils
import acoustics.weighting
from acoustics._signal import Signal
|
<commit_before>"""
Acoustics
=========
The acoustics module...
"""
import acoustics.ambisonics
import acoustics.utils
import acoustics.octave
import acoustics.doppler
import acoustics.signal
import acoustics.directivity
import acoustics.building
import acoustics.room
import acoustics.standards
import acoustics.cepstrum
from acoustics._signal import Signal
<commit_msg>Load all modules by default<commit_after>
|
"""
Acoustics
=========
The acoustics module...
"""
import acoustics.aio
import acoustics.ambisonics
import acoustics.atmosphere
import acoustics.bands
import acoustics.building
import acoustics.cepstrum
import acoustics.criterion
import acoustics.decibel
import acoustics.descriptors
import acoustics.directivity
import acoustics.doppler
import acoustics.generator
import acoustics.imaging
import acoustics.octave
import acoustics.power
import acoustics.quantity
import acoustics.reflection
import acoustics.room
import acoustics.signal
import acoustics.turbulence
#import acoustics.utils
import acoustics.weighting
from acoustics._signal import Signal
|
"""
Acoustics
=========
The acoustics module...
"""
import acoustics.ambisonics
import acoustics.utils
import acoustics.octave
import acoustics.doppler
import acoustics.signal
import acoustics.directivity
import acoustics.building
import acoustics.room
import acoustics.standards
import acoustics.cepstrum
from acoustics._signal import Signal
Load all modules by default"""
Acoustics
=========
The acoustics module...
"""
import acoustics.aio
import acoustics.ambisonics
import acoustics.atmosphere
import acoustics.bands
import acoustics.building
import acoustics.cepstrum
import acoustics.criterion
import acoustics.decibel
import acoustics.descriptors
import acoustics.directivity
import acoustics.doppler
import acoustics.generator
import acoustics.imaging
import acoustics.octave
import acoustics.power
import acoustics.quantity
import acoustics.reflection
import acoustics.room
import acoustics.signal
import acoustics.turbulence
#import acoustics.utils
import acoustics.weighting
from acoustics._signal import Signal
|
<commit_before>"""
Acoustics
=========
The acoustics module...
"""
import acoustics.ambisonics
import acoustics.utils
import acoustics.octave
import acoustics.doppler
import acoustics.signal
import acoustics.directivity
import acoustics.building
import acoustics.room
import acoustics.standards
import acoustics.cepstrum
from acoustics._signal import Signal
<commit_msg>Load all modules by default<commit_after>"""
Acoustics
=========
The acoustics module...
"""
import acoustics.aio
import acoustics.ambisonics
import acoustics.atmosphere
import acoustics.bands
import acoustics.building
import acoustics.cepstrum
import acoustics.criterion
import acoustics.decibel
import acoustics.descriptors
import acoustics.directivity
import acoustics.doppler
import acoustics.generator
import acoustics.imaging
import acoustics.octave
import acoustics.power
import acoustics.quantity
import acoustics.reflection
import acoustics.room
import acoustics.signal
import acoustics.turbulence
#import acoustics.utils
import acoustics.weighting
from acoustics._signal import Signal
|
513af3716c596bb67c0f6552824b854b3735858c
|
corehq/apps/domain/tests/test_password_strength.py
|
corehq/apps/domain/tests/test_password_strength.py
|
from django import forms
from django.test import SimpleTestCase, override_settings
from corehq.apps.domain.forms import clean_password
class PasswordStrengthTest(SimpleTestCase):
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_0_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[0])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_1_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[1])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_2_password(self):
self.assert_good_password(PASSWORDS_BY_STRENGTH[2])
@override_settings(MINIMUM_ZXCVBN_SCORE=3)
def test_sensitivity_to_minimum_zxcvbn_score_setting(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[2])
def assert_good_password(self, password):
self.assertEqual(clean_password(password), password)
def assert_bad_password(self, password):
with self.assertRaises(forms.ValidationError):
clean_password(password)
PASSWORDS_BY_STRENGTH = {
0: 's3cr3t',
1: 'password7',
2: 'aljfzpo',
3: '1234mna823',
4: ')(^#:LKNVA^',
}
|
Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE setting
|
Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE setting
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE setting
|
from django import forms
from django.test import SimpleTestCase, override_settings
from corehq.apps.domain.forms import clean_password
class PasswordStrengthTest(SimpleTestCase):
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_0_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[0])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_1_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[1])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_2_password(self):
self.assert_good_password(PASSWORDS_BY_STRENGTH[2])
@override_settings(MINIMUM_ZXCVBN_SCORE=3)
def test_sensitivity_to_minimum_zxcvbn_score_setting(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[2])
def assert_good_password(self, password):
self.assertEqual(clean_password(password), password)
def assert_bad_password(self, password):
with self.assertRaises(forms.ValidationError):
clean_password(password)
PASSWORDS_BY_STRENGTH = {
0: 's3cr3t',
1: 'password7',
2: 'aljfzpo',
3: '1234mna823',
4: ')(^#:LKNVA^',
}
|
<commit_before><commit_msg>Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE setting<commit_after>
|
from django import forms
from django.test import SimpleTestCase, override_settings
from corehq.apps.domain.forms import clean_password
class PasswordStrengthTest(SimpleTestCase):
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_0_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[0])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_1_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[1])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_2_password(self):
self.assert_good_password(PASSWORDS_BY_STRENGTH[2])
@override_settings(MINIMUM_ZXCVBN_SCORE=3)
def test_sensitivity_to_minimum_zxcvbn_score_setting(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[2])
def assert_good_password(self, password):
self.assertEqual(clean_password(password), password)
def assert_bad_password(self, password):
with self.assertRaises(forms.ValidationError):
clean_password(password)
PASSWORDS_BY_STRENGTH = {
0: 's3cr3t',
1: 'password7',
2: 'aljfzpo',
3: '1234mna823',
4: ')(^#:LKNVA^',
}
|
Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE settingfrom django import forms
from django.test import SimpleTestCase, override_settings
from corehq.apps.domain.forms import clean_password
class PasswordStrengthTest(SimpleTestCase):
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_0_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[0])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_1_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[1])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_2_password(self):
self.assert_good_password(PASSWORDS_BY_STRENGTH[2])
@override_settings(MINIMUM_ZXCVBN_SCORE=3)
def test_sensitivity_to_minimum_zxcvbn_score_setting(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[2])
def assert_good_password(self, password):
self.assertEqual(clean_password(password), password)
def assert_bad_password(self, password):
with self.assertRaises(forms.ValidationError):
clean_password(password)
PASSWORDS_BY_STRENGTH = {
0: 's3cr3t',
1: 'password7',
2: 'aljfzpo',
3: '1234mna823',
4: ')(^#:LKNVA^',
}
|
<commit_before><commit_msg>Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE setting<commit_after>from django import forms
from django.test import SimpleTestCase, override_settings
from corehq.apps.domain.forms import clean_password
class PasswordStrengthTest(SimpleTestCase):
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_0_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[0])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_1_password(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[1])
@override_settings(MINIMUM_ZXCVBN_SCORE=2)
def test_score_2_password(self):
self.assert_good_password(PASSWORDS_BY_STRENGTH[2])
@override_settings(MINIMUM_ZXCVBN_SCORE=3)
def test_sensitivity_to_minimum_zxcvbn_score_setting(self):
self.assert_bad_password(PASSWORDS_BY_STRENGTH[2])
def assert_good_password(self, password):
self.assertEqual(clean_password(password), password)
def assert_bad_password(self, password):
with self.assertRaises(forms.ValidationError):
clean_password(password)
PASSWORDS_BY_STRENGTH = {
0: 's3cr3t',
1: 'password7',
2: 'aljfzpo',
3: '1234mna823',
4: ')(^#:LKNVA^',
}
|
|
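The clean_password under test lives in corehq.apps.domain.forms and is not shown in this record. Judging by the settings the tests override, it plausibly wraps the zxcvbn strength estimator along these lines (an assumption, not the real implementation):

from django import forms
from django.conf import settings
from zxcvbn import zxcvbn

def clean_password(password):
    # zxcvbn scores passwords from 0 (weakest) to 4 (strongest)
    if zxcvbn(password)['score'] < settings.MINIMUM_ZXCVBN_SCORE:
        raise forms.ValidationError('Password is not strong enough.')
    return password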
97d6cce2a5c0c905f0c33c41316c8e65eaed0e08
|
update-bikestations.py
|
update-bikestations.py
|
#!/usr/bin/env python
from multiprocessing.pool import ThreadPool
import requests
import json
baseurl = 'http://api.citybik.es/v2/networks/'
networkids = [ 'bixi-montreal', 'bixi-toronto', 'capital-bixi', 'hubway',
'capital-bikeshare', 'citi-bike-nyc', 'barclays-cycle-hire' ]
def process_network(networkid):
r = requests.get(baseurl + networkid)
network = r.json()['network']
# output just the stations that are installed, only the metadata we care
# about
output_stations = []
for station in network['stations']:
# some networks list "uninstalled" stations. don't want those
        if station['extra'].get('installed', True):
output_stations.append({'id': station['id'],
'name': station['name'],
'freeBikes': station['free_bikes'],
'emptySlots': station['empty_slots'],
'latitude': station['latitude'],
'longitude': station['longitude']})
open('%s.json' % networkid, 'w').write(json.dumps(output_stations))
return network['location']
pool = ThreadPool()
locations = pool.map(process_network, networkids)
with open('locations.js', 'w') as f:
f.write('var networks = {')
for (i, networkid) in enumerate(networkids):
location = locations[i]
f.write('"%s": { name: "%s", latitude: %s, longitude: %s },' % (
networkid, location['city'], location['latitude'],
location['longitude']))
f.write('};')
|
Update way we synchronize from citybik.es
|
Update way we synchronize from citybik.es
Now we only download the information we actually need. We also download
and generate network information dynamically, which should enable some
cool stuff
|
Python
|
mit
|
wlach/nixi,wlach/nixi
|
Update way we synchronize from citybik.es
Now we only download the information we actually need. We also download
and generate network information dynamically, which should enable some
cool stuff
|
#!/usr/bin/env python
from multiprocessing.pool import ThreadPool
import requests
import json
baseurl = 'http://api.citybik.es/v2/networks/'
networkids = [ 'bixi-montreal', 'bixi-toronto', 'capital-bixi', 'hubway',
'capital-bikeshare', 'citi-bike-nyc', 'barclays-cycle-hire' ]
def process_network(networkid):
r = requests.get(baseurl + networkid)
network = r.json()['network']
# output just the stations that are installed, only the metadata we care
# about
output_stations = []
for station in network['stations']:
# some networks list "uninstalled" stations. don't want those
if station['extra'].get('installed', True):
output_stations.append({'id': station['id'],
'name': station['name'],
'freeBikes': station['free_bikes'],
'emptySlots': station['empty_slots'],
'latitude': station['latitude'],
'longitude': station['longitude']})
open('%s.json' % networkid, 'w').write(json.dumps(output_stations))
return network['location']
pool = ThreadPool()
locations = pool.map(process_network, networkids)
with open('locations.js', 'w') as f:
f.write('var networks = {')
for (i, networkid) in enumerate(networkids):
location = locations[i]
f.write('"%s": { name: "%s", latitude: %s, longitude: %s },' % (
networkid, location['city'], location['latitude'],
location['longitude']))
f.write('};')
|
<commit_before><commit_msg>Update way we synchronize from citybik.es
Now we only download the information we actually need. We also download
and generate network information dynamically, which should enable some
cool stuff<commit_after>
|
#!/usr/bin/env python
from multiprocessing.pool import ThreadPool
import requests
import json
baseurl = 'http://api.citybik.es/v2/networks/'
networkids = [ 'bixi-montreal', 'bixi-toronto', 'capital-bixi', 'hubway',
'capital-bikeshare', 'citi-bike-nyc', 'barclays-cycle-hire' ]
def process_network(networkid):
r = requests.get(baseurl + networkid)
network = r.json()['network']
# output just the stations that are installed, only the metadata we care
# about
output_stations = []
for station in network['stations']:
# some networks list "uninstalled" stations. don't want those
if station['extra'].get('installed', True):
output_stations.append({'id': station['id'],
'name': station['name'],
'freeBikes': station['free_bikes'],
'emptySlots': station['empty_slots'],
'latitude': station['latitude'],
'longitude': station['longitude']})
open('%s.json' % networkid, 'w').write(json.dumps(output_stations))
return network['location']
pool = ThreadPool()
locations = pool.map(process_network, networkids)
with open('locations.js', 'w') as f:
f.write('var networks = {')
for (i, networkid) in enumerate(networkids):
location = locations[i]
f.write('"%s": { name: "%s", latitude: %s, longitude: %s },' % (
networkid, location['city'], location['latitude'],
location['longitude']))
f.write('};')
|
Update way we synchronize from citybik.es
Now we only download the information we actually need. We also download
and generate network information dynamically, which should enable some
cool stuff#!/usr/bin/env python
from multiprocessing.pool import ThreadPool
import requests
import json
baseurl = 'http://api.citybik.es/v2/networks/'
networkids = [ 'bixi-montreal', 'bixi-toronto', 'capital-bixi', 'hubway',
'capital-bikeshare', 'citi-bike-nyc', 'barclays-cycle-hire' ]
def process_network(networkid):
r = requests.get(baseurl + networkid)
network = r.json()['network']
# output just the stations that are installed, only the metadata we care
# about
output_stations = []
for station in network['stations']:
# some networks list "uninstalled" stations. don't want those
if station['extra'].get('installed', True):
output_stations.append({'id': station['id'],
'name': station['name'],
'freeBikes': station['free_bikes'],
'emptySlots': station['empty_slots'],
'latitude': station['latitude'],
'longitude': station['longitude']})
open('%s.json' % networkid, 'w').write(json.dumps(output_stations))
return network['location']
pool = ThreadPool()
locations = pool.map(process_network, networkids)
with open('locations.js', 'w') as f:
f.write('var networks = {')
for (i, networkid) in enumerate(networkids):
location = locations[i]
f.write('"%s": { name: "%s", latitude: %s, longitude: %s },' % (
networkid, location['city'], location['latitude'],
location['longitude']))
f.write('};')
|
<commit_before><commit_msg>Update way we synchronize from citybik.es
Now we only download the information we actually need. We also download
and generate network information dynamically, which should enable some
cool stuff<commit_after>#!/usr/bin/env python
from multiprocessing.pool import ThreadPool
import requests
import json
baseurl = 'http://api.citybik.es/v2/networks/'
networkids = [ 'bixi-montreal', 'bixi-toronto', 'capital-bixi', 'hubway',
'capital-bikeshare', 'citi-bike-nyc', 'barclays-cycle-hire' ]
def process_network(networkid):
r = requests.get(baseurl + networkid)
network = r.json()['network']
# output just the stations that are installed, only the metadata we care
# about
output_stations = []
for station in network['stations']:
# some networks list "uninstalled" stations. don't want those
if station['extra'].get('installed', True):
output_stations.append({'id': station['id'],
'name': station['name'],
'freeBikes': station['free_bikes'],
'emptySlots': station['empty_slots'],
'latitude': station['latitude'],
'longitude': station['longitude']})
open('%s.json' % networkid, 'w').write(json.dumps(output_stations))
return network['location']
pool = ThreadPool()
locations = pool.map(process_network, networkids)
with open('locations.js', 'w') as f:
f.write('var networks = {')
for (i, networkid) in enumerate(networkids):
location = locations[i]
f.write('"%s": { name: "%s", latitude: %s, longitude: %s },' % (
networkid, location['city'], location['latitude'],
location['longitude']))
f.write('};')
|
|
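Aside: the 'installed' predicate in the station filter above is easy to invert by accident; the intended rule is to keep a station when the flag is absent or explicitly true, and drop it only when installed is false. A dependency-free sketch of that rule:
def keep_station(extra):
    # keep stations whose network reports no 'installed' flag at all,
    # and stations explicitly marked installed; drop only installed=False
    return bool(extra.get('installed', True))

assert keep_station({}) is True
assert keep_station({'installed': True}) is True
assert keep_station({'installed': False}) is False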
665b3372e089fda3dde104b0754efa65a87a9bd2
|
Sketches/MPS/Bookmarks/TestHTTPResponseHandler.py
|
Sketches/MPS/Bookmarks/TestHTTPResponseHandler.py
|
#!/usr/bin/python
import base64
from Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor
from Kamaelia.File.Writing import SimpleFileWriter
from Kamaelia.Chassis.Pipeline import Pipeline
from TwitterStream import HTTPClientResponseHandler
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Util.Console import ConsoleEchoer
Pipeline(
ReadFileAdaptor("tweets.b64.txt", readmode="line"),
PureTransformer(base64.b64decode),
HTTPClientResponseHandler(suppress_header = True),
SimpleFileWriter("tweets.b64raw.txt"),
).run()
|
Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network
|
Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network
|
#!/usr/bin/python
import base64
from Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor
from Kamaelia.File.Writing import SimpleFileWriter
from Kamaelia.Chassis.Pipeline import Pipeline
from TwitterStream import HTTPClientResponseHandler
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Util.Console import ConsoleEchoer
Pipeline(
ReadFileAdaptor("tweets.b64.txt", readmode="line"),
PureTransformer(base64.b64decode),
HTTPClientResponseHandler(suppress_header = True),
SimpleFileWriter("tweets.b64raw.txt"),
).run()
|
<commit_before><commit_msg>Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network<commit_after>
|
#!/usr/bin/python
import base64
from Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor
from Kamaelia.File.Writing import SimpleFileWriter
from Kamaelia.Chassis.Pipeline import Pipeline
from TwitterStream import HTTPClientResponseHandler
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Util.Console import ConsoleEchoer
Pipeline(
ReadFileAdaptor("tweets.b64.txt", readmode="line"),
PureTransformer(base64.b64decode),
HTTPClientResponseHandler(suppress_header = True),
SimpleFileWriter("tweets.b64raw.txt"),
).run()
|
Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network#!/usr/bin/python
import base64
from Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor
from Kamaelia.File.Writing import SimpleFileWriter
from Kamaelia.Chassis.Pipeline import Pipeline
from TwitterStream import HTTPClientResponseHandler
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Util.Console import ConsoleEchoer
Pipeline(
ReadFileAdaptor("tweets.b64.txt", readmode="line"),
PureTransformer(base64.b64decode),
HTTPClientResponseHandler(suppress_header = True),
SimpleFileWriter("tweets.b64raw.txt"),
).run()
|
<commit_before><commit_msg>Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network<commit_after>#!/usr/bin/python
import base64
from Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor
from Kamaelia.File.Writing import SimpleFileWriter
from Kamaelia.Chassis.Pipeline import Pipeline
from TwitterStream import HTTPClientResponseHandler
from Kamaelia.Util.PureTransformer import PureTransformer
from Kamaelia.Util.Console import ConsoleEchoer
Pipeline(
ReadFileAdaptor("tweets.b64.txt", readmode="line"),
PureTransformer(base64.b64decode),
HTTPClientResponseHandler(suppress_header = True),
SimpleFileWriter("tweets.b64raw.txt"),
).run()
|
|
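Aside: the Kamaelia pipeline above replays a capture of base64-encoded network chunks; a plain-Python sketch of the same decode step, assuming a tweets.b64.txt file with one encoded chunk per line:
import base64

def replay_capture(in_path='tweets.b64.txt', out_path='tweets.b64raw.txt'):
    # turn each base64 line back into the raw bytes that came off the wire
    with open(in_path, 'rb') as src, open(out_path, 'wb') as dst:
        for line in src:
            dst.write(base64.b64decode(line.strip()))

if __name__ == '__main__':
    replay_capture()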
53467bd7d4c9c12b73c66244a91f31f0dbadeeec
|
hc/front/tests/test_add_pagerteam.py
|
hc/front/tests/test_add_pagerteam.py
|
from hc.api.models import Channel
from hc.test import BaseTestCase
class AddPagerTeamTestCase(BaseTestCase):
url = "/integrations/add_pagerteam/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "PagerTeam")
def test_it_works(self):
form = {"value": "http://example.org"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pagerteam")
self.assertEqual(c.value, "http://example.org")
self.assertEqual(c.project, self.project)
def test_it_rejects_bad_url(self):
form = {"value": "not an URL"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertContains(r, "Enter a valid URL")
|
Add pagerteam tests file which had been missed despite its existence
|
Add pagerteam tests file which had been missed despite its existence
|
Python
|
bsd-3-clause
|
healthchecks/healthchecks,iphoting/healthchecks,iphoting/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks
|
Add pagerteam tests file which had been missed despite its existence
|
from hc.api.models import Channel
from hc.test import BaseTestCase
class AddPagerTeamTestCase(BaseTestCase):
url = "/integrations/add_pagerteam/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "PagerTeam")
def test_it_works(self):
form = {"value": "http://example.org"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pagerteam")
self.assertEqual(c.value, "http://example.org")
self.assertEqual(c.project, self.project)
def test_it_rejects_bad_url(self):
form = {"value": "not an URL"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertContains(r, "Enter a valid URL")
|
<commit_before><commit_msg>Add pagerteam tests file which had been missed despite its existence<commit_after>
|
from hc.api.models import Channel
from hc.test import BaseTestCase
class AddPagerTeamTestCase(BaseTestCase):
url = "/integrations/add_pagerteam/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "PagerTeam")
def test_it_works(self):
form = {"value": "http://example.org"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pagerteam")
self.assertEqual(c.value, "http://example.org")
self.assertEqual(c.project, self.project)
def test_it_rejects_bad_url(self):
form = {"value": "not an URL"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertContains(r, "Enter a valid URL")
|
Add pagerteam tests file which had been missed despite its existencefrom hc.api.models import Channel
from hc.test import BaseTestCase
class AddPagerTeamTestCase(BaseTestCase):
url = "/integrations/add_pagerteam/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "PagerTeam")
def test_it_works(self):
form = {"value": "http://example.org"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pagerteam")
self.assertEqual(c.value, "http://example.org")
self.assertEqual(c.project, self.project)
def test_it_rejects_bad_url(self):
form = {"value": "not an URL"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertContains(r, "Enter a valid URL")
|
<commit_before><commit_msg>Add pagerteam tests file which had been missed despite its existence<commit_after>from hc.api.models import Channel
from hc.test import BaseTestCase
class AddPagerTeamTestCase(BaseTestCase):
url = "/integrations/add_pagerteam/"
def test_instructions_work(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "PagerTeam")
def test_it_works(self):
form = {"value": "http://example.org"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertRedirects(r, "/integrations/")
c = Channel.objects.get()
self.assertEqual(c.kind, "pagerteam")
self.assertEqual(c.value, "http://example.org")
self.assertEqual(c.project, self.project)
def test_it_rejects_bad_url(self):
form = {"value": "not an URL"}
self.client.login(username="alice@example.org", password="password")
r = self.client.post(self.url, form)
self.assertContains(r, "Enter a valid URL")
|
|
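Aside: the 'Enter a valid URL' assertion above leans on Django's URL validation; a dependency-free approximation of the accept/reject split the test relies on:
from urllib.parse import urlparse

def looks_like_url(value):
    # rough stand-in for Django's URLValidator: require a scheme and a host
    parts = urlparse(value)
    return parts.scheme in ('http', 'https') and bool(parts.netloc)

assert looks_like_url('http://example.org')
assert not looks_like_url('not an URL')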
9f9916d662d1ab130c9685c415c25b19a14733d7
|
examples/svm_objectives.py
|
examples/svm_objectives.py
|
# showing the relation between cutting plane and primal objectives
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.cross_validation import train_test_split
from pystruct.problems import CrammerSingerSVMProblem
from pystruct.learners import (StructuredSVM, OneSlackSSVM,
SubgradientStructuredSVM)
# do a 10-class digit classification
digits = load_digits()
X, y = digits.data, digits.target
X /= X.max()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
pbl = CrammerSingerSVMProblem(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = StructuredSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=500, batch_size=10)
one_slack_svm = OneSlackSSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=1000, tol=0.001)
subgradient_svm = SubgradientStructuredSVM(pbl, C=20, learning_rate=0.01,
max_iter=300, decay_exponent=0,
momentum=0, verbose=0)
# n-slack cutting plane ssvm
n_slack_svm.fit(X_train_bias, y_train)
## 1-slack cutting plane ssvm
one_slack_svm.fit(X_train_bias, y_train)
# online subgradient ssvm
subgradient_svm.fit(X_train_bias, y_train)
#plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(one_slack_svm.objective_curve_, label="one-slack lower bound")
plt.plot(one_slack_svm.primal_objective_curve_, label="one-slack primal")
plt.plot(subgradient_svm.objective_curve_, label="subgradient")
plt.legend()
plt.show()
|
Add example to illustrate different optimization procedures
|
Add example to illustrate different optimization procedures
|
Python
|
bsd-2-clause
|
d-mittal/pystruct,massmutual/pystruct,wattlebird/pystruct,massmutual/pystruct,wattlebird/pystruct,pystruct/pystruct,amueller/pystruct,d-mittal/pystruct,pystruct/pystruct,amueller/pystruct
|
Add example to illustrate different optimization procedures
|
# showing the relation between cutting plane and primal objectives
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.cross_validation import train_test_split
from pystruct.problems import CrammerSingerSVMProblem
from pystruct.learners import (StructuredSVM, OneSlackSSVM,
SubgradientStructuredSVM)
# do a 10-class digit classification
digits = load_digits()
X, y = digits.data, digits.target
X /= X.max()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
pbl = CrammerSingerSVMProblem(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = StructuredSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=500, batch_size=10)
one_slack_svm = OneSlackSSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=1000, tol=0.001)
subgradient_svm = SubgradientStructuredSVM(pbl, C=20, learning_rate=0.01,
max_iter=300, decay_exponent=0,
momentum=0, verbose=0)
# n-slack cutting plane ssvm
n_slack_svm.fit(X_train_bias, y_train)
## 1-slack cutting plane ssvm
one_slack_svm.fit(X_train_bias, y_train)
# online subgradient ssvm
subgradient_svm.fit(X_train_bias, y_train)
#plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(one_slack_svm.objective_curve_, label="one-slack lower bound")
plt.plot(one_slack_svm.primal_objective_curve_, label="one-slack primal")
plt.plot(subgradient_svm.objective_curve_, label="subgradient")
plt.legend()
plt.show()
|
<commit_before><commit_msg>Add example to illustrate different optimization procedures<commit_after>
|
# showing the relation between cutting plane and primal objectives
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.cross_validation import train_test_split
from pystruct.problems import CrammerSingerSVMProblem
from pystruct.learners import (StructuredSVM, OneSlackSSVM,
SubgradientStructuredSVM)
# do a 10-class digit classification
digits = load_digits()
X, y = digits.data, digits.target
X /= X.max()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
pbl = CrammerSingerSVMProblem(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = StructuredSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=500, batch_size=10)
one_slack_svm = OneSlackSSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=1000, tol=0.001)
subgradient_svm = SubgradientStructuredSVM(pbl, C=20, learning_rate=0.01,
max_iter=300, decay_exponent=0,
momentum=0, verbose=0)
# n-slack cutting plane ssvm
n_slack_svm.fit(X_train_bias, y_train)
## 1-slack cutting plane ssvm
one_slack_svm.fit(X_train_bias, y_train)
# online subgradient ssvm
subgradient_svm.fit(X_train_bias, y_train)
#plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(one_slack_svm.objective_curve_, label="one-slack lower bound")
plt.plot(one_slack_svm.primal_objective_curve_, label="one-slack primal")
plt.plot(subgradient_svm.objective_curve_, label="subgradient")
plt.legend()
plt.show()
|
Add example to illustrate different optimization procedures# showing the relation between cutting plane and primal objectives
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.cross_validation import train_test_split
from pystruct.problems import CrammerSingerSVMProblem
from pystruct.learners import (StructuredSVM, OneSlackSSVM,
SubgradientStructuredSVM)
# do a 10-class digit classification
digits = load_digits()
X, y = digits.data, digits.target
X /= X.max()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
pbl = CrammerSingerSVMProblem(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = StructuredSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=500, batch_size=10)
one_slack_svm = OneSlackSSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=1000, tol=0.001)
subgradient_svm = SubgradientStructuredSVM(pbl, C=20, learning_rate=0.01,
max_iter=300, decay_exponent=0,
momentum=0, verbose=0)
# n-slack cutting plane ssvm
n_slack_svm.fit(X_train_bias, y_train)
## 1-slack cutting plane ssvm
one_slack_svm.fit(X_train_bias, y_train)
# online subgradient ssvm
subgradient_svm.fit(X_train_bias, y_train)
#plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(one_slack_svm.objective_curve_, label="one-slack lower bound")
plt.plot(one_slack_svm.primal_objective_curve_, label="one-slack primal")
plt.plot(subgradient_svm.objective_curve_, label="subgradient")
plt.legend()
plt.show()
|
<commit_before><commit_msg>Add example to illustrate different optimization procedures<commit_after># showing the relation between cutting plane and primal objectives
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
from sklearn.cross_validation import train_test_split
from pystruct.problems import CrammerSingerSVMProblem
from pystruct.learners import (StructuredSVM, OneSlackSSVM,
SubgradientStructuredSVM)
# do a 10-class digit classification
digits = load_digits()
X, y = digits.data, digits.target
X /= X.max()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# we add a constant 1 feature for the bias
X_train_bias = np.hstack([X_train, np.ones((X_train.shape[0], 1))])
pbl = CrammerSingerSVMProblem(n_features=X_train_bias.shape[1], n_classes=10)
n_slack_svm = StructuredSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=500, batch_size=10)
one_slack_svm = OneSlackSSVM(pbl, verbose=0, check_constraints=False, C=20,
max_iter=1000, tol=0.001)
subgradient_svm = SubgradientStructuredSVM(pbl, C=20, learning_rate=0.01,
max_iter=300, decay_exponent=0,
momentum=0, verbose=0)
# n-slack cutting plane ssvm
n_slack_svm.fit(X_train_bias, y_train)
## 1-slack cutting plane ssvm
one_slack_svm.fit(X_train_bias, y_train)
# online subgradient ssvm
subgradient_svm.fit(X_train_bias, y_train)
#plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(n_slack_svm.objective_curve_, label="n-slack lower bound")
plt.plot(one_slack_svm.objective_curve_, label="one-slack lower bound")
plt.plot(one_slack_svm.primal_objective_curve_, label="one-slack primal")
plt.plot(subgradient_svm.objective_curve_, label="subgradient")
plt.legend()
plt.show()
|
|
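Aside: the constant-1 column appended above is the usual bias trick; a small numpy sketch showing that folding the intercept into one extra weight leaves the scores unchanged:
import numpy as np

X = np.array([[0.5, 2.0],
              [1.5, 0.5]])
X_bias = np.hstack([X, np.ones((X.shape[0], 1))])  # one constant feature

w = np.array([1.0, -2.0])  # weights without an explicit intercept
b = 3.0
w_bias = np.append(w, b)   # fold the intercept into the last weight

# X @ w + b and X_bias @ w_bias compute identical decision scores
assert np.allclose(X @ w + b, X_bias @ w_bias)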
3a2e9a19feab0c882a9821b7ff555bd1e2693190
|
exp/sandbox/DeltaLaplacianExp.py
|
exp/sandbox/DeltaLaplacianExp.py
|
import numpy
import scipy.sparse
from apgl.graph import GraphUtils
from apgl.util.Util import Util
numpy.set_printoptions(suppress=True, precision=3)
n = 10
W1 = scipy.sparse.rand(n, n, 0.5).todense()
W1 = W1.T.dot(W1)
W2 = W1.copy()
W2[1, 2] = 1
W2[2, 1] = 1
print("W1="+str(W1))
print("W2="+str(W2))
L1 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W1))
L2 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W2))
deltaL = L2 - L1
print("L1="+str(L1.todense()))
print("L2="+str(L2.todense()))
print("deltaL="+str(deltaL.todense()))
print("rank(deltaL)=" + str(Util.rank(deltaL.todense())))
|
Test effect of change in Laplacian.
|
Test effect of change in Laplacian.
|
Python
|
bsd-3-clause
|
charanpald/APGL
|
Test effect of change in Laplacian.
|
import numpy
import scipy.sparse
from apgl.graph import GraphUtils
from apgl.util.Util import Util
numpy.set_printoptions(suppress=True, precision=3)
n = 10
W1 = scipy.sparse.rand(n, n, 0.5).todense()
W1 = W1.T.dot(W1)
W2 = W1.copy()
W2[1, 2] = 1
W2[2, 1] = 1
print("W1="+str(W1))
print("W2="+str(W2))
L1 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W1))
L2 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W2))
deltaL = L2 - L1
print("L1="+str(L1.todense()))
print("L2="+str(L2.todense()))
print("deltaL="+str(deltaL.todense()))
print("rank(deltaL)=" + str(Util.rank(deltaL.todense())))
|
<commit_before><commit_msg>Test effect of change in Laplacian. <commit_after>
|
import numpy
import scipy.sparse
from apgl.graph import GraphUtils
from apgl.util.Util import Util
numpy.set_printoptions(suppress=True, precision=3)
n = 10
W1 = scipy.sparse.rand(n, n, 0.5).todense()
W1 = W1.T.dot(W1)
W2 = W1.copy()
W2[1, 2] = 1
W2[2, 1] = 1
print("W1="+str(W1))
print("W2="+str(W2))
L1 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W1))
L2 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W2))
deltaL = L2 - L1
print("L1="+str(L1.todense()))
print("L2="+str(L2.todense()))
print("deltaL="+str(deltaL.todense()))
print("rank(deltaL)=" + str(Util.rank(deltaL.todense())))
|
Test effect of change in Laplacian. import numpy
import scipy.sparse
from apgl.graph import GraphUtils
from apgl.util.Util import Util
numpy.set_printoptions(suppress=True, precision=3)
n = 10
W1 = scipy.sparse.rand(n, n, 0.5).todense()
W1 = W1.T.dot(W1)
W2 = W1.copy()
W2[1, 2] = 1
W2[2, 1] = 1
print("W1="+str(W1))
print("W2="+str(W2))
L1 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W1))
L2 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W2))
deltaL = L2 - L1
print("L1="+str(L1.todense()))
print("L2="+str(L2.todense()))
print("deltaL="+str(deltaL.todense()))
print("rank(deltaL)=" + str(Util.rank(deltaL.todense())))
|
<commit_before><commit_msg>Test effect of change in Laplacian. <commit_after>import numpy
import scipy.sparse
from apgl.graph import GraphUtils
from apgl.util.Util import Util
numpy.set_printoptions(suppress=True, precision=3)
n = 10
W1 = scipy.sparse.rand(n, n, 0.5).todense()
W1 = W1.T.dot(W1)
W2 = W1.copy()
W2[1, 2] = 1
W2[2, 1] = 1
print("W1="+str(W1))
print("W2="+str(W2))
L1 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W1))
L2 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W2))
deltaL = L2 - L1
print("L1="+str(L1.todense()))
print("L2="+str(L2.todense()))
print("deltaL="+str(deltaL.todense()))
print("rank(deltaL)=" + str(Util.rank(deltaL.todense())))
|
|
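Aside: the normalised Laplacian used above is L = I - D^(-1/2) W D^(-1/2) with D the diagonal degree matrix; a numpy-only sketch of the same quantity, so the experiment can be retried without apgl:
import numpy as np

def normalised_laplacian_sym(W):
    # L = I - D^(-1/2) W D^(-1/2); isolated vertices get a zero scaling
    W = np.asarray(W, dtype=float)
    d = W.sum(axis=1)
    d_inv_sqrt = np.zeros_like(d)
    d_inv_sqrt[d > 0] = 1.0 / np.sqrt(d[d > 0])
    return np.eye(W.shape[0]) - d_inv_sqrt[:, None] * W * d_inv_sqrt[None, :]

W = np.array([[0., 1., 1.],
              [1., 0., 0.],
              [1., 0., 0.]])
L = normalised_laplacian_sym(W)
print(np.round(L, 3))
print('rank(L) =', np.linalg.matrix_rank(L))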
9a22cf7452723686a5065658ce5c9d31333c8a33
|
examples/download_random_leader_avatar.py
|
examples/download_random_leader_avatar.py
|
# Run with Python 3
import json
import requests
from random import randint
import shutil
import math
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = json.loads(resp.text)['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Get leaders by count
def get_leaders(count):
pages = math.ceil(count / 20)
leaders = []
for page in range(1, pages + 1):
api_url = 'https://stepic.org/api/leaders/?page={}'.format(page)
response = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)
leaders += response['leaders']
if not response['meta']['has_next']:
break
return leaders
# Get user by id
def get_user(id):
api_url = 'https://stepic.org/api/users/{}/'.format(id)
return json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)['users'][0]
# Download avatar by user id
def download_avatar(id, filename):
avatar_url = get_user(id)['avatar']
response = requests.get(avatar_url, stream=True)
with open('{}.png'.format(filename), 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
# Get leader user randomly from 100 leaders and download his avatar
rand_leader_id = get_leaders(100)[randint(0, 99)]['user']
download_avatar(rand_leader_id, 'leader')
|
Add download random leader avatar to examples
|
Add download random leader avatar to examples
|
Python
|
mit
|
StepicOrg/Stepic-API
|
Add download random leader avatar to examples
|
# Run with Python 3
import json
import requests
from random import randint
import shutil
import math
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = json.loads(resp.text)['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Get leaders by count
def get_leaders(count):
pages = math.ceil(count / 20)
leaders = []
for page in range(1, pages + 1):
api_url = 'https://stepic.org/api/leaders/?page={}'.format(page)
response = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)
leaders += response['leaders']
if not response['meta']['has_next']:
break
return leaders
# Get user by id
def get_user(id):
api_url = 'https://stepic.org/api/users/{}/'.format(id)
return json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)['users'][0]
# Download avatar by user id
def download_avatar(id, filename):
avatar_url = get_user(id)['avatar']
response = requests.get(avatar_url, stream=True)
with open('{}.png'.format(filename), 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
# Get leader user randomly from 100 leaders and download his avatar
rand_leader_id = get_leaders(100)[randint(0, 99)]['user']
download_avatar(rand_leader_id, 'leader')
|
<commit_before><commit_msg>Add download random leader avatar to examples<commit_after>
|
# Run with Python 3
import json
import requests
from random import randint
import shutil
import math
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = json.loads(resp.text)['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Get leaders by count
def get_leaders(count):
pages = math.ceil(count / 20)
leaders = []
for page in range(1, pages + 1):
api_url = 'https://stepic.org/api/leaders/?page={}'.format(page)
response = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)
leaders += response['leaders']
if not response['meta']['has_next']:
break
return leaders
# Get user by id
def get_user(id):
api_url = 'https://stepic.org/api/users/{}/'.format(id)
return json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)['users'][0]
# Download avatar by user id
def download_avatar(id, filename):
avatar_url = get_user(id)['avatar']
response = requests.get(avatar_url, stream=True)
with open('{}.png'.format(filename), 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
# Get leader user randomly from 100 leaders and download his avatar
rand_leader_id = get_leaders(100)[randint(0, 99)]['user']
download_avatar(rand_leader_id, 'leader')
|
Add download random leader avatar to examples# Run with Python 3
import json
import requests
from random import randint
import shutil
import math
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = json.loads(resp.text)['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Get leaders by count
def get_leaders(count):
pages = math.ceil(count / 20)
leaders = []
for page in range(1, pages + 1):
api_url = 'https://stepic.org/api/leaders/?page={}'.format(page)
response = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)
leaders += response['leaders']
if not response['meta']['has_next']:
break
return leaders
# Get user by id
def get_user(id):
api_url = 'https://stepic.org/api/users/{}/'.format(id)
return json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)['users'][0]
# Download avatar by user id
def download_avatar(id, filename):
avatar_url = get_user(id)['avatar']
response = requests.get(avatar_url, stream=True)
with open('{}.png'.format(filename), 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
# Get leader user randomly from 100 leaders and download his avatar
rand_leader_id = get_leaders(100)[randint(0, 99)]['user']
download_avatar(rand_leader_id, 'leader')
|
<commit_before><commit_msg>Add download random leader avatar to examples<commit_after># Run with Python 3
import json
import requests
from random import randint
import shutil
import math
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = json.loads(resp.text)['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Get leaders by count
def get_leaders(count):
pages = math.ceil(count / 20)
leaders = []
for page in range(1, pages + 1):
api_url = 'https://stepic.org/api/leaders/?page={}'.format(page)
response = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)
leaders += response['leaders']
if not response['meta']['has_next']:
break
return leaders
# Get user by id
def get_user(id):
api_url = 'https://stepic.org/api/users/{}/'.format(id)
return json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)['users'][0]
# Download avatar by user id
def download_avatar(id, filename):
avatar_url = get_user(id)['avatar']
response = requests.get(avatar_url, stream=True)
with open('{}.png'.format(filename), 'wb') as out_file:
shutil.copyfileobj(response.raw, out_file)
# Get leader user randomly from 100 leaders and download his avatar
rand_leader_id = get_leaders(100)[randint(0, 99)]['user']
download_avatar(rand_leader_id, 'leader')
|
|
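Aside: steps 1 and 2 above are the standard OAuth2 client-credentials flow; a generic requests sketch of just the token exchange (the stepic.org endpoint is taken from the record, the keys are placeholders):
import requests

def get_client_credentials_token(token_url, client_id, client_secret):
    # POST the grant type with HTTP Basic auth; the provider replies with JSON
    resp = requests.post(
        token_url,
        data={'grant_type': 'client_credentials'},
        auth=requests.auth.HTTPBasicAuth(client_id, client_secret),
    )
    resp.raise_for_status()
    return resp.json()['access_token']

# token = get_client_credentials_token('https://stepic.org/oauth2/token/', '...', '...')
# headers = {'Authorization': 'Bearer ' + token}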
0507a47f1c15bac5f6eddbeb9c712f5c2b2a9358
|
intake_bluesky/tests/test_msgpack.py
|
intake_bluesky/tests/test_msgpack.py
|
import intake_bluesky.msgpack # noqa
import intake
from suitcase.msgpack import Serializer
import os
import pytest
import shutil
import tempfile
import time
import types
from .generic import * # noqa
TMP_DIR = tempfile.mkdtemp()
TEST_CATALOG_PATH = [TMP_DIR]
YAML_FILENAME = 'intake_msgpack_test_catalog.yml'
def teardown_module(module):
try:
shutil.rmtree(TMP_DIR)
except BaseException:
pass
@pytest.fixture(params=['local', 'remote'])
def bundle(request, intake_server, example_data, tmp_path): # noqa
serializer = Serializer(tmp_path)
uid, docs = example_data
for name, doc in docs:
serializer(name, doc)
serializer.close()
fullname = os.path.join(TMP_DIR, YAML_FILENAME)
with open(fullname, 'w') as f:
f.write(f'''
plugins:
source:
- module: intake_bluesky
sources:
xyz:
description: Some imaginary beamline
driver: intake_bluesky.msgpack.BlueskyMsgpackCatalog
container: catalog
args:
paths: {[str(path) for path in serializer.artifacts['all']]}
handler_registry:
NPY_SEQ: ophyd.sim.NumpySeqHandler
metadata:
beamline: "00-ID"
''')
time.sleep(2)
if request.param == 'local':
cat = intake.Catalog(os.path.join(TMP_DIR, YAML_FILENAME))
elif request.param == 'remote':
cat = intake.Catalog(intake_server, page_size=10)
else:
raise ValueError
return types.SimpleNamespace(cat=cat,
uid=uid,
docs=docs)
|
Add tests for msgpack reader.
|
TST: Add tests for msgpack reader.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
TST: Add tests for msgpack reader.
|
import intake_bluesky.msgpack # noqa
import intake
from suitcase.msgpack import Serializer
import os
import pytest
import shutil
import tempfile
import time
import types
from .generic import * # noqa
TMP_DIR = tempfile.mkdtemp()
TEST_CATALOG_PATH = [TMP_DIR]
YAML_FILENAME = 'intake_msgpack_test_catalog.yml'
def teardown_module(module):
try:
shutil.rmtree(TMP_DIR)
except BaseException:
pass
@pytest.fixture(params=['local', 'remote'])
def bundle(request, intake_server, example_data, tmp_path): # noqa
serializer = Serializer(tmp_path)
uid, docs = example_data
for name, doc in docs:
serializer(name, doc)
serializer.close()
fullname = os.path.join(TMP_DIR, YAML_FILENAME)
with open(fullname, 'w') as f:
f.write(f'''
plugins:
source:
- module: intake_bluesky
sources:
xyz:
description: Some imaginary beamline
driver: intake_bluesky.msgpack.BlueskyMsgpackCatalog
container: catalog
args:
paths: {[str(path) for path in serializer.artifacts['all']]}
handler_registry:
NPY_SEQ: ophyd.sim.NumpySeqHandler
metadata:
beamline: "00-ID"
''')
time.sleep(2)
if request.param == 'local':
cat = intake.Catalog(os.path.join(TMP_DIR, YAML_FILENAME))
elif request.param == 'remote':
cat = intake.Catalog(intake_server, page_size=10)
else:
raise ValueError
return types.SimpleNamespace(cat=cat,
uid=uid,
docs=docs)
|
<commit_before><commit_msg>TST: Add tests for msgpack reader.<commit_after>
|
import intake_bluesky.msgpack # noqa
import intake
from suitcase.msgpack import Serializer
import os
import pytest
import shutil
import tempfile
import time
import types
from .generic import * # noqa
TMP_DIR = tempfile.mkdtemp()
TEST_CATALOG_PATH = [TMP_DIR]
YAML_FILENAME = 'intake_msgpack_test_catalog.yml'
def teardown_module(module):
try:
shutil.rmtree(TMP_DIR)
except BaseException:
pass
@pytest.fixture(params=['local', 'remote'])
def bundle(request, intake_server, example_data, tmp_path): # noqa
serializer = Serializer(tmp_path)
uid, docs = example_data
for name, doc in docs:
serializer(name, doc)
serializer.close()
fullname = os.path.join(TMP_DIR, YAML_FILENAME)
with open(fullname, 'w') as f:
f.write(f'''
plugins:
source:
- module: intake_bluesky
sources:
xyz:
description: Some imaginary beamline
driver: intake_bluesky.msgpack.BlueskyMsgpackCatalog
container: catalog
args:
paths: {[str(path) for path in serializer.artifacts['all']]}
handler_registry:
NPY_SEQ: ophyd.sim.NumpySeqHandler
metadata:
beamline: "00-ID"
''')
time.sleep(2)
if request.param == 'local':
cat = intake.Catalog(os.path.join(TMP_DIR, YAML_FILENAME))
elif request.param == 'remote':
cat = intake.Catalog(intake_server, page_size=10)
else:
raise ValueError
return types.SimpleNamespace(cat=cat,
uid=uid,
docs=docs)
|
TST: Add tests for msgpack reader.import intake_bluesky.msgpack # noqa
import intake
from suitcase.msgpack import Serializer
import os
import pytest
import shutil
import tempfile
import time
import types
from .generic import * # noqa
TMP_DIR = tempfile.mkdtemp()
TEST_CATALOG_PATH = [TMP_DIR]
YAML_FILENAME = 'intake_msgpack_test_catalog.yml'
def teardown_module(module):
try:
shutil.rmtree(TMP_DIR)
except BaseException:
pass
@pytest.fixture(params=['local', 'remote'])
def bundle(request, intake_server, example_data, tmp_path): # noqa
serializer = Serializer(tmp_path)
uid, docs = example_data
for name, doc in docs:
serializer(name, doc)
serializer.close()
fullname = os.path.join(TMP_DIR, YAML_FILENAME)
with open(fullname, 'w') as f:
f.write(f'''
plugins:
source:
- module: intake_bluesky
sources:
xyz:
description: Some imaginary beamline
driver: intake_bluesky.msgpack.BlueskyMsgpackCatalog
container: catalog
args:
paths: {[str(path) for path in serializer.artifacts['all']]}
handler_registry:
NPY_SEQ: ophyd.sim.NumpySeqHandler
metadata:
beamline: "00-ID"
''')
time.sleep(2)
if request.param == 'local':
cat = intake.Catalog(os.path.join(TMP_DIR, YAML_FILENAME))
elif request.param == 'remote':
cat = intake.Catalog(intake_server, page_size=10)
else:
raise ValueError
return types.SimpleNamespace(cat=cat,
uid=uid,
docs=docs)
|
<commit_before><commit_msg>TST: Add tests for msgpack reader.<commit_after>import intake_bluesky.msgpack # noqa
import intake
from suitcase.msgpack import Serializer
import os
import pytest
import shutil
import tempfile
import time
import types
from .generic import * # noqa
TMP_DIR = tempfile.mkdtemp()
TEST_CATALOG_PATH = [TMP_DIR]
YAML_FILENAME = 'intake_msgpack_test_catalog.yml'
def teardown_module(module):
try:
shutil.rmtree(TMP_DIR)
except BaseException:
pass
@pytest.fixture(params=['local', 'remote'])
def bundle(request, intake_server, example_data, tmp_path): # noqa
serializer = Serializer(tmp_path)
uid, docs = example_data
for name, doc in docs:
serializer(name, doc)
serializer.close()
fullname = os.path.join(TMP_DIR, YAML_FILENAME)
with open(fullname, 'w') as f:
f.write(f'''
plugins:
source:
- module: intake_bluesky
sources:
xyz:
description: Some imaginary beamline
driver: intake_bluesky.msgpack.BlueskyMsgpackCatalog
container: catalog
args:
paths: {[str(path) for path in serializer.artifacts['all']]}
handler_registry:
NPY_SEQ: ophyd.sim.NumpySeqHandler
metadata:
beamline: "00-ID"
''')
time.sleep(2)
if request.param == 'local':
cat = intake.Catalog(os.path.join(TMP_DIR, YAML_FILENAME))
elif request.param == 'remote':
cat = intake.Catalog(intake_server, page_size=10)
else:
raise ValueError
return types.SimpleNamespace(cat=cat,
uid=uid,
docs=docs)
|
|
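Aside: the local/remote switch above relies on pytest's parametrised fixtures; a minimal self-contained sketch of that mechanism, independent of intake:
import pytest

@pytest.fixture(params=['local', 'remote'])
def mode(request):
    # the fixture body runs once per entry in params; request.param is the value
    return request.param

def test_mode_is_known(mode):
    # collected twice, once per param, so both code paths get exercised
    assert mode in ('local', 'remote')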
a42a6a54f732ca7eba700b867a3025739ad6a271
|
list_all_users_in_group.py
|
list_all_users_in_group.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
def main():
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
if __name__ == "__main__":
main()
|
Move main code to function because of pylint warning 'Invalid constant name'
|
Move main code to function because of pylint warning 'Invalid constant name'
|
Python
|
cc0-1.0
|
vazhnov/list_all_users_in_group
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
Move main code to function because of pylint warning 'Invalid constant name'
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
def main():
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
if __name__ == "__main__":
main()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
<commit_msg>Move main code to function because of pylint warning 'Invalid constant name'<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
def main():
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
if __name__ == "__main__":
main()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
Move main code to function because of pylint warning 'Invalid constant name'#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
def main():
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
if __name__ == "__main__":
main()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
<commit_msg>Move main code to function because of pylint warning 'Invalid constant name'<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
def main():
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
if __name__ == "__main__":
main()
|
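Aside: the pylint complaint behind the record above (C0103, historically worded 'Invalid constant name') fires because module-level names are expected to be UPPER_CASE constants; a tiny sketch of the pattern the refactor adopts:
# at module level pylint would flag this lower_case binding:
# result = sorted({'alice', 'bob'})

def main():
    # inside a function the lower_case name is fine
    result = sorted({'alice', 'bob'})
    print(', '.join(result))

if __name__ == '__main__':
    main()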
d067d9937ff34787e6f632d86075af29c27d98f8
|
py/best-time-to-buy-and-sell-stock-with-transaction-fee.py
|
py/best-time-to-buy-and-sell-stock-with-transaction-fee.py
|
class Solution(object):
def maxProfit(self, prices, fee):
"""
:type prices: List[int]
:type fee: int
:rtype: int
"""
hold, not_hold = None, 0
for p in prices:
hold, not_hold = max(hold, not_hold - p - fee), max(not_hold, None if hold is None else hold + p)
return max(hold, not_hold)
|
Add py solution for 714. Best Time to Buy and Sell Stock with Transaction Fee
|
Add py solution for 714. Best Time to Buy and Sell Stock with Transaction Fee
714. Best Time to Buy and Sell Stock with Transaction Fee: https://leetcode.com/problems/best-time-to-buy-and-sell-stock-with-transaction-fee/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 714. Best Time to Buy and Sell Stock with Transaction Fee
714. Best Time to Buy and Sell Stock with Transaction Fee: https://leetcode.com/problems/best-time-to-buy-and-sell-stock-with-transaction-fee/
|
class Solution(object):
def maxProfit(self, prices, fee):
"""
:type prices: List[int]
:type fee: int
:rtype: int
"""
hold, not_hold = None, 0
for p in prices:
hold, not_hold = max(hold, not_hold - p - fee), max(not_hold, None if hold is None else hold + p)
return max(hold, not_hold)
|
<commit_before><commit_msg>Add py solution for 714. Best Time to Buy and Sell Stock with Transaction Fee
714. Best Time to Buy and Sell Stock with Transaction Fee: https://leetcode.com/problems/best-time-to-buy-and-sell-stock-with-transaction-fee/<commit_after>
|
class Solution(object):
def maxProfit(self, prices, fee):
"""
:type prices: List[int]
:type fee: int
:rtype: int
"""
hold, not_hold = None, 0
for p in prices:
hold, not_hold = max(hold, not_hold - p - fee), max(not_hold, None if hold is None else hold + p)
return max(hold, not_hold)
|
Add py solution for 714. Best Time to Buy and Sell Stock with Transaction Fee
714. Best Time to Buy and Sell Stock with Transaction Fee: https://leetcode.com/problems/best-time-to-buy-and-sell-stock-with-transaction-fee/class Solution(object):
def maxProfit(self, prices, fee):
"""
:type prices: List[int]
:type fee: int
:rtype: int
"""
hold, not_hold = None, 0
for p in prices:
hold, not_hold = max(hold, not_hold - p - fee), max(not_hold, None if hold is None else hold + p)
return max(hold, not_hold)
|
<commit_before><commit_msg>Add py solution for 714. Best Time to Buy and Sell Stock with Transaction Fee
714. Best Time to Buy and Sell Stock with Transaction Fee: https://leetcode.com/problems/best-time-to-buy-and-sell-stock-with-transaction-fee/<commit_after>class Solution(object):
def maxProfit(self, prices, fee):
"""
:type prices: List[int]
:type fee: int
:rtype: int
"""
hold, not_hold = None, 0
for p in prices:
hold, not_hold = max(hold, not_hold - p - fee), max(not_hold, None if hold is None else hold + p)
return max(hold, not_hold)
|
|
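A quick sanity check of the DP above, using the worked example from the LeetCode 714 problem statement. Note the solution compares None with integers inside max(), so it runs as-is on Python 2 only:
s = Solution()  # the class from the record above
# buy at 1, sell at 8, buy at 4, sell at 9; fee of 2 per trade: (8-1-2) + (9-4-2) = 8
print(s.maxProfit([1, 3, 2, 8, 4, 9], 2))  # expected: 8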
4c2c80e0004a758787beb555fbbe789cce5e82fc
|
nova/tests/test_vmwareapi_vm_util.py
|
nova/tests/test_vmwareapi_vm_util.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Canonical Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import test
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import vm_util
class fake_session(object):
def __init__(self, ret=None):
self.ret = ret
def _call_method(self, *args):
return self.ret
class VMwareVMUtilTestCase(test.TestCase):
def setUp(self):
super(VMwareVMUtilTestCase, self).setUp()
def tearDown(self):
super(VMwareVMUtilTestCase, self).tearDown()
def test_get_datastore_ref_and_name(self):
result = vm_util.get_datastore_ref_and_name(
fake_session([fake.Datastore()]))
self.assertEquals(result[1], "fake-ds")
self.assertEquals(result[2], 1024 * 1024 * 1024)
self.assertEquals(result[3], 1024 * 1024 * 500)
def test_get_datastore_ref_and_name_without_datastore(self):
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), host="fake-host")
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), cluster="fake-cluster")
|
Fix variable referenced before assignment in vmwareapi code.
|
Fix variable referenced before assignment in vmwareapi code.
Add unit tests for VMwareapi vm_util.
fix bug #1177689
Change-Id: If16109ee626c197227affba122c2e4986d92d2df
|
Python
|
apache-2.0
|
n0ano/gantt,n0ano/gantt
|
Fix variable referenced before assignment in vmwareapi code.
Add unit tests for VMwareapi vm_util.
fix bug #1177689
Change-Id: If16109ee626c197227affba122c2e4986d92d2df
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Canonical Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import test
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import vm_util
class fake_session(object):
def __init__(self, ret=None):
self.ret = ret
def _call_method(self, *args):
return self.ret
class VMwareVMUtilTestCase(test.TestCase):
def setUp(self):
super(VMwareVMUtilTestCase, self).setUp()
def tearDown(self):
super(VMwareVMUtilTestCase, self).tearDown()
def test_get_datastore_ref_and_name(self):
result = vm_util.get_datastore_ref_and_name(
fake_session([fake.Datastore()]))
self.assertEquals(result[1], "fake-ds")
self.assertEquals(result[2], 1024 * 1024 * 1024)
self.assertEquals(result[3], 1024 * 1024 * 500)
def test_get_datastore_ref_and_name_without_datastore(self):
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), host="fake-host")
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), cluster="fake-cluster")
|
<commit_before><commit_msg>Fix variable referenced before assignment in vmwareapi code.
Add unit tests for VMwareapi vm_util.
fix bug #1177689
Change-Id: If16109ee626c197227affba122c2e4986d92d2df<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Canonical Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import test
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import vm_util
class fake_session(object):
def __init__(self, ret=None):
self.ret = ret
def _call_method(self, *args):
return self.ret
class VMwareVMUtilTestCase(test.TestCase):
def setUp(self):
super(VMwareVMUtilTestCase, self).setUp()
def tearDown(self):
super(VMwareVMUtilTestCase, self).tearDown()
def test_get_datastore_ref_and_name(self):
result = vm_util.get_datastore_ref_and_name(
fake_session([fake.Datastore()]))
self.assertEquals(result[1], "fake-ds")
self.assertEquals(result[2], 1024 * 1024 * 1024)
self.assertEquals(result[3], 1024 * 1024 * 500)
def test_get_datastore_ref_and_name_without_datastore(self):
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), host="fake-host")
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), cluster="fake-cluster")
|
Fix variable referenced before assignment in vmwareapi code.
Add unit tests for VMwareapi vm_util.
fix bug #1177689
Change-Id: If16109ee626c197227affba122c2e4986d92d2df# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Canonical Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import test
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import vm_util
class fake_session(object):
def __init__(self, ret=None):
self.ret = ret
def _call_method(self, *args):
return self.ret
class VMwareVMUtilTestCase(test.TestCase):
def setUp(self):
super(VMwareVMUtilTestCase, self).setUp()
def tearDown(self):
super(VMwareVMUtilTestCase, self).tearDown()
def test_get_datastore_ref_and_name(self):
result = vm_util.get_datastore_ref_and_name(
fake_session([fake.Datastore()]))
self.assertEquals(result[1], "fake-ds")
self.assertEquals(result[2], 1024 * 1024 * 1024)
self.assertEquals(result[3], 1024 * 1024 * 500)
def test_get_datastore_ref_and_name_without_datastore(self):
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), host="fake-host")
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), cluster="fake-cluster")
|
<commit_before><commit_msg>Fix variable referenced before assignment in vmwareapi code.
Add unit tests for VMwareapi vm_util.
fix bug #1177689
Change-Id: If16109ee626c197227affba122c2e4986d92d2df<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Canonical Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import test
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import vm_util
class fake_session(object):
def __init__(self, ret=None):
self.ret = ret
def _call_method(self, *args):
return self.ret
class VMwareVMUtilTestCase(test.TestCase):
def setUp(self):
super(VMwareVMUtilTestCase, self).setUp()
def tearDown(self):
super(VMwareVMUtilTestCase, self).tearDown()
def test_get_datastore_ref_and_name(self):
result = vm_util.get_datastore_ref_and_name(
fake_session([fake.Datastore()]))
self.assertEquals(result[1], "fake-ds")
self.assertEquals(result[2], 1024 * 1024 * 1024)
self.assertEquals(result[3], 1024 * 1024 * 500)
def test_get_datastore_ref_and_name_without_datastore(self):
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), host="fake-host")
self.assertRaises(exception.DatastoreNotFound,
vm_util.get_datastore_ref_and_name,
fake_session(), cluster="fake-cluster")
|
|
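The fake_session stub above shows a common unit-testing pattern: swap the RPC session for an object that returns a canned value regardless of what is called. A self-contained sketch of the same idea, with illustrative names:
class FakeSession(object):
    def __init__(self, ret=None):
        self.ret = ret
    def _call_method(self, *args):
        # ignore which method was requested; hand back the canned value
        return self.ret
assert FakeSession([1, 2])._call_method("get_dynamic_property") == [1, 2]
assert FakeSession()._call_method("get_dynamic_property") is None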
1dc439fcf7a823270156708208339a8bf420703c
|
opps/sitemaps/sitemaps.py
|
opps/sitemaps/sitemaps.py
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.articles.models import Article
def InfoDisct(googlenews=False):
article = Article.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Article.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
Create Generic Sitemap abstract django
|
Create Generic Sitemap abstract django
|
Python
|
mit
|
opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps
|
Create Generic Sitemap abstract django
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.articles.models import Article
def InfoDisct(googlenews=False):
article = Article.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Article.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
<commit_before><commit_msg>Create Generic Sitemap abstract django<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.articles.models import Article
def InfoDisct(googlenews=False):
article = Article.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Article.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
Create Generic Sitemap abstract django# -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.articles.models import Article
def InfoDisct(googlenews=False):
article = Article.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Article.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
<commit_before><commit_msg>Create Generic Sitemap abstract django<commit_after># -*- coding: utf-8 -*-
from django.contrib.sitemaps import GenericSitemap as DjangoGenericSitemap
from django.contrib.sitemaps import Sitemap as DjangoSitemap
from django.utils import timezone
from opps.articles.models import Article
def InfoDisct(googlenews=False):
article = Article.objects.filter(date_available__lte=timezone.now(),
published=True)
if googlenews:
article = article[:1000]
return {
'queryset': article,
'date_field': 'date_available',
}
class BaseSitemap(DjangoSitemap):
priority = 0.6
def items(self):
return Article.objects.filter(date_available__lte=timezone.now(),
published=True)
def lastmod(self, obj):
return obj.date_available
class GenericSitemap(DjangoGenericSitemap):
limit = 1000
priority = 0.6
|
|
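A hedged sketch of how these classes would typically be wired into a Django 1.x urls.py — the URL pattern and the 'articles' section key are assumptions; only InfoDisct and GenericSitemap come from the module above:
from django.conf.urls import url
from django.contrib.sitemaps import views as sitemap_views
from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct
sitemaps = {
    'articles': GenericSitemap(InfoDisct(), priority=0.6),
}
urlpatterns = [
    url(r'^sitemap\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}),
]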
7707e65ed591b890d91bcb7bf22923b8c17a113a
|
readthedocs/rtd_tests/tests/test_api_permissions.py
|
readthedocs/rtd_tests/tests/test_api_permissions.py
|
from functools import partial
from mock import Mock
from unittest import TestCase
from readthedocs.restapi.permissions import APIRestrictedPermission
class APIRestrictedPermissionTests(TestCase):
def get_request(self, method, is_admin):
request = Mock()
request.method = method
request.user.is_staff = is_admin
return request
def assertAllow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertTrue(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertTrue(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def assertDisallow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertFalse(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertFalse(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def test_non_object_permissions(self):
handler = APIRestrictedPermission()
assertAllow = partial(self.assertAllow, handler, obj=None)
assertDisallow = partial(self.assertDisallow, handler, obj=None)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
def test_object_permissions(self):
handler = APIRestrictedPermission()
obj = Mock()
assertAllow = partial(self.assertAllow, handler, obj=obj)
assertDisallow = partial(self.assertDisallow, handler, obj=obj)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
|
Add tests from Gregor's PR
|
Add tests from Gregor's PR
|
Python
|
mit
|
davidfischer/readthedocs.org,wijerasa/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,SteveViss/readthedocs.org,stevepiercy/readthedocs.org,rtfd/readthedocs.org,stevepiercy/readthedocs.org,safwanrahman/readthedocs.org,clarkperkins/readthedocs.org,espdev/readthedocs.org,espdev/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,stevepiercy/readthedocs.org,SteveViss/readthedocs.org,techtonik/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,techtonik/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,clarkperkins/readthedocs.org,techtonik/readthedocs.org,clarkperkins/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,tddv/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,techtonik/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org
|
Add tests from Gregor's PR
|
from functools import partial
from mock import Mock
from unittest import TestCase
from readthedocs.restapi.permissions import APIRestrictedPermission
class APIRestrictedPermissionTests(TestCase):
def get_request(self, method, is_admin):
request = Mock()
request.method = method
request.user.is_staff = is_admin
return request
def assertAllow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertTrue(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertTrue(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def assertDisallow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertFalse(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertFalse(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def test_non_object_permissions(self):
handler = APIRestrictedPermission()
assertAllow = partial(self.assertAllow, handler, obj=None)
assertDisallow = partial(self.assertDisallow, handler, obj=None)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
def test_object_permissions(self):
handler = APIRestrictedPermission()
obj = Mock()
assertAllow = partial(self.assertAllow, handler, obj=obj)
assertDisallow = partial(self.assertDisallow, handler, obj=obj)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
|
<commit_before><commit_msg>Add tests from Gregor's PR<commit_after>
|
from functools import partial
from mock import Mock
from unittest import TestCase
from readthedocs.restapi.permissions import APIRestrictedPermission
class APIRestrictedPermissionTests(TestCase):
def get_request(self, method, is_admin):
request = Mock()
request.method = method
request.user.is_staff = is_admin
return request
def assertAllow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertTrue(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertTrue(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def assertDisallow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertFalse(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertFalse(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def test_non_object_permissions(self):
handler = APIRestrictedPermission()
assertAllow = partial(self.assertAllow, handler, obj=None)
assertDisallow = partial(self.assertDisallow, handler, obj=None)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
def test_object_permissions(self):
handler = APIRestrictedPermission()
obj = Mock()
assertAllow = partial(self.assertAllow, handler, obj=obj)
assertDisallow = partial(self.assertDisallow, handler, obj=obj)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
|
Add tests from Gregor's PRfrom functools import partial
from mock import Mock
from unittest import TestCase
from readthedocs.restapi.permissions import APIRestrictedPermission
class APIRestrictedPermissionTests(TestCase):
def get_request(self, method, is_admin):
request = Mock()
request.method = method
request.user.is_staff = is_admin
return request
def assertAllow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertTrue(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertTrue(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def assertDisallow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertFalse(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertFalse(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def test_non_object_permissions(self):
handler = APIRestrictedPermission()
assertAllow = partial(self.assertAllow, handler, obj=None)
assertDisallow = partial(self.assertDisallow, handler, obj=None)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
def test_object_permissions(self):
handler = APIRestrictedPermission()
obj = Mock()
assertAllow = partial(self.assertAllow, handler, obj=obj)
assertDisallow = partial(self.assertDisallow, handler, obj=obj)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
|
<commit_before><commit_msg>Add tests from Gregor's PR<commit_after>from functools import partial
from mock import Mock
from unittest import TestCase
from readthedocs.restapi.permissions import APIRestrictedPermission
class APIRestrictedPermissionTests(TestCase):
def get_request(self, method, is_admin):
request = Mock()
request.method = method
request.user.is_staff = is_admin
return request
def assertAllow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertTrue(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertTrue(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def assertDisallow(self, handler, method, is_admin, obj=None):
if obj is None:
self.assertFalse(handler.has_permission(
request=self.get_request(method, is_admin=is_admin),
view=None))
else:
self.assertFalse(handler.has_object_permission(
request=self.get_request(method, is_admin=is_admin),
view=None,
obj=obj))
def test_non_object_permissions(self):
handler = APIRestrictedPermission()
assertAllow = partial(self.assertAllow, handler, obj=None)
assertDisallow = partial(self.assertDisallow, handler, obj=None)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
def test_object_permissions(self):
handler = APIRestrictedPermission()
obj = Mock()
assertAllow = partial(self.assertAllow, handler, obj=obj)
assertDisallow = partial(self.assertDisallow, handler, obj=obj)
assertAllow('GET', is_admin=False)
assertAllow('HEAD', is_admin=False)
assertAllow('OPTIONS', is_admin=False)
assertDisallow('DELETE', is_admin=False)
assertDisallow('PATCH', is_admin=False)
assertDisallow('POST', is_admin=False)
assertDisallow('PUT', is_admin=False)
assertAllow('GET', is_admin=True)
assertAllow('HEAD', is_admin=True)
assertAllow('OPTIONS', is_admin=True)
assertAllow('DELETE', is_admin=True)
assertAllow('PATCH', is_admin=True)
assertAllow('POST', is_admin=True)
assertAllow('PUT', is_admin=True)
|
|
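The tests above use functools.partial to pre-bind the handler and object, cutting repetition. A self-contained illustration of the pattern with a stand-in permission rule (the rule mirrors what the assertions expect, not the real APIRestrictedPermission internals):
from functools import partial
def allowed(method, is_admin):
    # stand-in rule: read-only methods for everyone, everything for staff
    return is_admin or method in ('GET', 'HEAD', 'OPTIONS')
allow_admin = partial(allowed, is_admin=True)
allow_anon = partial(allowed, is_admin=False)
assert allow_admin('DELETE') is True
assert allow_anon('DELETE') is False
assert allow_anon('GET') is True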
30567284410b9bb7154b8d39e5dfe7bc4bb1b269
|
herald/migrations/0006_auto_20170825_1813.py
|
herald/migrations/0006_auto_20170825_1813.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-08-25 23:13
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('herald', '0005_merge_20170407_1316'),
]
operations = [
migrations.AlterField(
model_name='sentnotification',
name='user',
field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
|
Add migration for on_delete SET_NULL
|
Add migration for on_delete SET_NULL
|
Python
|
mit
|
worthwhile/django-herald,jproffitt/django-herald,jproffitt/django-herald,worthwhile/django-herald
|
Add migration for on_delete SET_NULL
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-08-25 23:13
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('herald', '0005_merge_20170407_1316'),
]
operations = [
migrations.AlterField(
model_name='sentnotification',
name='user',
field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
|
<commit_before><commit_msg>Add migration for on_delete SET_NULL<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-08-25 23:13
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('herald', '0005_merge_20170407_1316'),
]
operations = [
migrations.AlterField(
model_name='sentnotification',
name='user',
field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
|
Add migration for on_delete SET_NULL# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-08-25 23:13
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('herald', '0005_merge_20170407_1316'),
]
operations = [
migrations.AlterField(
model_name='sentnotification',
name='user',
field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
|
<commit_before><commit_msg>Add migration for on_delete SET_NULL<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-08-25 23:13
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('herald', '0005_merge_20170407_1316'),
]
operations = [
migrations.AlterField(
model_name='sentnotification',
name='user',
field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
|
|
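For reference, the model definition implied by this migration would look roughly as follows — model and field names are taken from the migration itself, the rest is assumed:
from django.conf import settings
from django.db import models
class SentNotification(models.Model):
    # SET_NULL keeps the notification row when its user is deleted
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )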
f3c4bac262c6d09730b3f0c4a24639fde8b4d923
|
gunicorn-app.py
|
gunicorn-app.py
|
from __future__ import unicode_literals
import multiprocessing
import gunicorn.app.base
from gunicorn.six import iteritems
def number_of_workers():
return (multiprocessing.cpu_count() * 2) + 1
def handler_app(environ, start_response):
response_body = b'Works fine'
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
]
start_response(status, response_headers)
return [response_body]
class StandaloneApplication(gunicorn.app.base.BaseApplication):
def __init__(self, app, options=None):
self.options = options or {}
self.application = app
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
self.cfg.set(key.lower(), value)
def load(self):
return self.application
if __name__ == '__main__':
options = {
'bind': '%s:%s' % ('127.0.0.1', '8080'),
'workers': number_of_workers(),
}
StandaloneApplication(handler_app, options).run()
|
Add wsgi compatible example gunicorn application
|
Add wsgi compatible example gunicorn application
|
Python
|
mit
|
voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts
|
Add wsgi compatible example gunicorn application
|
from __future__ import unicode_literals
import multiprocessing
import gunicorn.app.base
from gunicorn.six import iteritems
def number_of_workers():
return (multiprocessing.cpu_count() * 2) + 1
def handler_app(environ, start_response):
response_body = b'Works fine'
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
]
start_response(status, response_headers)
return [response_body]
class StandaloneApplication(gunicorn.app.base.BaseApplication):
def __init__(self, app, options=None):
self.options = options or {}
self.application = app
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
self.cfg.set(key.lower(), value)
def load(self):
return self.application
if __name__ == '__main__':
options = {
'bind': '%s:%s' % ('127.0.0.1', '8080'),
'workers': number_of_workers(),
}
StandaloneApplication(handler_app, options).run()
|
<commit_before><commit_msg>Add wsgi compatible example gunicorn application<commit_after>
|
from __future__ import unicode_literals
import multiprocessing
import gunicorn.app.base
from gunicorn.six import iteritems
def number_of_workers():
return (multiprocessing.cpu_count() * 2) + 1
def handler_app(environ, start_response):
response_body = b'Works fine'
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
]
start_response(status, response_headers)
return [response_body]
class StandaloneApplication(gunicorn.app.base.BaseApplication):
def __init__(self, app, options=None):
self.options = options or {}
self.application = app
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
self.cfg.set(key.lower(), value)
def load(self):
return self.application
if __name__ == '__main__':
options = {
'bind': '%s:%s' % ('127.0.0.1', '8080'),
'workers': number_of_workers(),
}
StandaloneApplication(handler_app, options).run()
|
Add wsgi compatible example gunicorn applicationfrom __future__ import unicode_literals
import multiprocessing
import gunicorn.app.base
from gunicorn.six import iteritems
def number_of_workers():
return (multiprocessing.cpu_count() * 2) + 1
def handler_app(environ, start_response):
response_body = b'Works fine'
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
]
start_response(status, response_headers)
return [response_body]
class StandaloneApplication(gunicorn.app.base.BaseApplication):
def __init__(self, app, options=None):
self.options = options or {}
self.application = app
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
self.cfg.set(key.lower(), value)
def load(self):
return self.application
if __name__ == '__main__':
options = {
'bind': '%s:%s' % ('127.0.0.1', '8080'),
'workers': number_of_workers(),
}
StandaloneApplication(handler_app, options).run()
|
<commit_before><commit_msg>Add wsgi compatible example gunicorn application<commit_after>from __future__ import unicode_literals
import multiprocessing
import gunicorn.app.base
from gunicorn.six import iteritems
def number_of_workers():
return (multiprocessing.cpu_count() * 2) + 1
def handler_app(environ, start_response):
response_body = b'Works fine'
status = '200 OK'
response_headers = [
('Content-Type', 'text/plain'),
]
start_response(status, response_headers)
return [response_body]
class StandaloneApplication(gunicorn.app.base.BaseApplication):
def __init__(self, app, options=None):
self.options = options or {}
self.application = app
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
self.cfg.set(key.lower(), value)
def load(self):
return self.application
if __name__ == '__main__':
options = {
'bind': '%s:%s' % ('127.0.0.1', '8080'),
'workers': number_of_workers(),
}
StandaloneApplication(handler_app, options).run()
|
|
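A hedged smoke test for the embedded server above, assuming it is already running on 127.0.0.1:8080:
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2
response = urlopen('http://127.0.0.1:8080/')
print(response.read())  # expected: b'Works fine'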
ae948c95ea0087f33f13ef3463dc022eda0301a2
|
python/labs/make-a-short-story/mystory.py
|
python/labs/make-a-short-story/mystory.py
|
# Create a function for adjectives so I don't repeat myself in prompts.
def get_adjective():
return raw_input("Give me an adjective: ")
def get_noun():
return raw_input("Give me a noun: ")
def get_verb():
return raw_input("Give me a verb: ")
adjective1 = get_adjective()
noun1 = get_noun()
verb1 = get_verb()
adjective2 = get_adjective()
noun2 = get_noun()
verb2 = get_verb()
# Use parentheses so Python will "know" the string has multiple lines
print ("At CSSI we were all " + adjective1 + " when a " + noun1 +
" fell through the ceiling. See-Mong tried to " + verb1 + " it but it " +
"was too " + adjective2 + ". Instead, Zack gave it a " + noun2 + " which " +
"caused it to " + verb2 + ".")
|
Add a solution for the MadLibs lab
|
Add a solution for the MadLibs lab
|
Python
|
apache-2.0
|
google/cssi-labs,google/cssi-labs
|
Add a solution for the MadLibs lab
|
# Create a function for adjectives so I don't repeat myself in prompts.
def get_adjective():
return raw_input("Give me an adjective: ")
def get_noun():
return raw_input("Give me a noun: ")
def get_verb():
return raw_input("Give me a verb: ")
adjective1 = get_adjective()
noun1 = get_noun()
verb1 = get_verb()
adjective2 = get_adjective()
noun2 = get_noun()
verb2 = get_verb()
# Use parentheses so Python will "know" the string has multiple lines
print ("At CSSI we were all " + adjective1 + " when a " + noun1 +
" fell through the ceiling. See-Mong tried to " + verb1 + " it but it " +
"was too " + adjective2 + ". Instead, Zack gave it a " + noun2 + " which " +
"caused it to " + verb2 + ".")
|
<commit_before><commit_msg>Add a solution for the MadLibs lab<commit_after>
|
# Create a function for adjectives so I don't repeat myself in prompts.
def get_adjective():
return raw_input("Give me an adjective: ")
def get_noun():
return raw_input("Give me a noun: ")
def get_verb():
return raw_input("Give me a verb: ")
adjective1 = get_adjective()
noun1 = get_noun()
verb1 = get_verb()
adjective2 = get_adjective()
noun2 = get_noun()
verb2 = get_verb()
# Use parentheses so Python will "know" the string has multiple lines
print ("At CSSI we were all " + adjective1 + " when a " + noun1 +
" fell through the ceiling. See-Mong tried to " + verb1 + " it but it " +
"was too " + adjective2 + ". Instead, Zack gave it a " + noun2 + " which " +
"caused it to " + verb2 + ".")
|
Add a solution for the MadLibs lab# Create a function for adjectives so I don't repeat myself in prompts.
def get_adjective():
return raw_input("Give me an adjective: ")
def get_noun():
return raw_input("Give me a noun: ")
def get_verb():
return raw_input("Give me a verb: ")
adjective1 = get_adjective()
noun1 = get_noun()
verb1 = get_verb()
adjective2 = get_adjective()
noun2 = get_noun()
verb2 = get_verb()
# Use parentheses so Python will "know" the string has multiple lines
print ("At CSSI we were all " + adjective1 + " when a " + noun1 +
" fell through the ceiling. See-Mong tried to " + verb1 + " it but it " +
"was too " + adjective2 + ". Instead, Zack gave it a " + noun2 + " which " +
"caused it to " + verb2 + ".")
|
<commit_before><commit_msg>Add a solution for the MadLibs lab<commit_after># Create a function for adjectives so I don't repeat myself in prompts.
def get_adjective():
return raw_input("Give me an adjective: ")
def get_noun():
return raw_input("Give me a noun: ")
def get_verb():
return raw_input("Give me a verb: ")
adjective1 = get_adjective()
noun1 = get_noun()
verb1 = get_verb()
adjective2 = get_adjective()
noun2 = get_noun()
verb2 = get_verb()
# Use parentheses so Python will "know" the string has multiple lines
print ("At CSSI we were all " + adjective1 + " when a " + noun1 +
" fell through the ceiling. See-Mong tried to " + verb1 + " it but it " +
"was too " + adjective2 + ". Instead, Zack gave it a " + noun2 + " which " +
"caused it to " + verb2 + ".")
|
|
77b34390345208a6e0bc5ad30cdce62e42ca0c56
|
wafer/management/commands/pycon_speaker_tickets.py
|
wafer/management/commands/pycon_speaker_tickets.py
|
import sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = "List speakers and associated tickets."
option_list = BaseCommand.option_list + tuple([
make_option('--speakers', action="store_true", default=False,
help='List speakers and tickets (for accepted talks)'),
make_option('--allspeakers', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = User.objects.filter(talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['allspeakers']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = '%d' % tickets[0].barcode
else:
ticket = 'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
|
Add simple command to list speakers and tickets
|
Add simple command to list speakers and tickets
|
Python
|
isc
|
CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer,CarlFK/wafer
|
Add simple command to list speakers and tickets
|
import sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = "List speakers and associated tickets."
option_list = BaseCommand.option_list + tuple([
make_option('--speakers', action="store_true", default=False,
help='List speakers and tickets (for accepted talks)'),
make_option('--allspeakers', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = User.objects.filter(talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['allspeakers']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = '%d' % tickets[0].barcode
else:
ticket = 'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
|
<commit_before><commit_msg>Add simple command to list speakers and tickets<commit_after>
|
import sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = "List speakers and associated tickets."
option_list = BaseCommand.option_list + tuple([
make_option('--speakers', action="store_true", default=False,
help='List speakers and tickets (for accepted talks)'),
make_option('--allspeakers', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = User.objects.filter(talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['allspeakers']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = '%d' % tickets[0].barcode
else:
ticket = 'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
|
Add simple command to list speakers and ticketsimport sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = "List speakers and associated tickets."
option_list = BaseCommand.option_list + tuple([
make_option('--speakers', action="store_true", default=False,
help='List speakers and tickets (for accepted talks)'),
make_option('--allspeakers', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = User.objects.filter(talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['allspeakers']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = '%d' % tickets[0].barcode
else:
ticket = 'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
|
<commit_before><commit_msg>Add simple command to list speakers and tickets<commit_after>import sys
import csv
from optparse import make_option
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from wafer.talks.models import ACCEPTED
class Command(BaseCommand):
help = "List speakers and associated tickets."
option_list = BaseCommand.option_list + tuple([
make_option('--speakers', action="store_true", default=False,
help='List speakers and tickets (for accepted talks)'),
make_option('--allspeakers', action="store_true", default=False,
help='List speakers and tickets (for all talks)'),
])
def _speaker_tickets(self, options):
people = User.objects.filter(talks__isnull=False).distinct()
csv_file = csv.writer(sys.stdout)
for person in people:
# We query talks to filter out the speakers from ordinary
# accounts
if options['allspeakers']:
titles = [x.title for x in person.talks.all()]
else:
titles = [x.title for x in
person.talks.filter(status=ACCEPTED)]
if not titles:
continue
tickets = person.ticket.all()
if tickets:
ticket = '%d' % tickets[0].barcode
else:
ticket = 'NO TICKET PURCHASED'
row = [x.encode("utf-8") for x in (person.get_full_name(),
person.email,
ticket)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._speaker_tickets(options)
|
|
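A hedged invocation sketch via Django's call_command — the command name is inferred from the file name above, and a configured Django settings module is assumed:
from django.core.management import call_command
# prints CSV rows of (full name, email, ticket barcode) to stdout
call_command('pycon_speaker_tickets', allspeakers=True)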
3bbf06964452683d986db401556183f575d15a55
|
insert-project.py
|
insert-project.py
|
#!/usr/bin/env python3
import pymongo
import subprocess
import re
from datetime import datetime
import argparse
from json import load as load_json
import sys
def _info(msg):
sys.stdout.write(msg + '\n')
sys.stdout.flush()
cl_parser = argparse.ArgumentParser(description='Insert a project into Meteor\'s local MongoDB')
cl_parser.add_argument('input', help='JSON input file')
cl_parser.add_argument('--site', default=None, help='Specify Meteor site (default: localhost)')
args = cl_parser.parse_args()
with open(args.input) as input_file:
json = load_json(input_file)
command = ['meteor', 'mongo', '-U']
if args.site:
command.append(args.site)
_info('Getting Mongo URL...')
mongo_url = subprocess.check_output(command).decode().strip()
mongo_url, db_name = mongo_url.rsplit('/', 1)
_info('Connecting to MongoDB: {} (DB: {})'.format(mongo_url, db_name))
client = pymongo.MongoClient(mongo_url)
db = client[db_name]
project = {
'created': datetime.utcnow(),
'owner': json['owner'],
'projectId': json['id'],
'tags': json['tags'],
'text': json['description'],
'title': json['title'],
'instructions': json['instructions'],
'pictures': json['pictures'],
'files': json['files'],
'license': json['license'],
}
db.projects.update({'owner': project['owner'], 'projectId': project['projectId']}, project,
upsert=True)
_info('Successfully inserted project \'{}/{}\' ({})'.format(
project['owner'],
project['projectId'],
project['title'],
))
|
Add script for inserting project into DB
|
Add script for inserting project into DB
|
Python
|
mit
|
muzhack/muzhack,muzhack/muzhack,praneybehl/muzhack,muzhack/musitechhub,praneybehl/muzhack,praneybehl/muzhack,muzhack/musitechhub,praneybehl/muzhack,muzhack/muzhack,muzhack/muzhack,muzhack/musitechhub,muzhack/musitechhub
|
Add script for inserting project into DB
|
#!/usr/bin/env python3
import pymongo
import subprocess
import re
from datetime import datetime
import argparse
from json import load as load_json
import sys
def _info(msg):
sys.stdout.write(msg + '\n')
sys.stdout.flush()
cl_parser = argparse.ArgumentParser(description='Insert a project into Meteor\'s local MongoDB')
cl_parser.add_argument('input', help='JSON input file')
cl_parser.add_argument('--site', default=None, help='Specify Meteor site (default: localhost)')
args = cl_parser.parse_args()
with open(args.input) as input_file:
json = load_json(input_file)
command = ['meteor', 'mongo', '-U']
if args.site:
command.append(args.site)
_info('Getting Mongo URL...')
mongo_url = subprocess.check_output(command).decode().strip()
mongo_url, db_name = mongo_url.rsplit('/', 1)
_info('Connecting to MongoDB: {} (DB: {})'.format(mongo_url, db_name))
client = pymongo.MongoClient(mongo_url)
db = client[db_name]
project = {
'created': datetime.utcnow(),
'owner': json['owner'],
'projectId': json['id'],
'tags': json['tags'],
'text': json['description'],
'title': json['title'],
'instructions': json['instructions'],
'pictures': json['pictures'],
'files': json['files'],
'license': json['license'],
}
db.projects.update({'owner': project['owner'], 'projectId': project['projectId']}, project,
upsert=True)
_info('Successfully inserted project \'{}/{}\' ({})'.format(
project['owner'],
project['projectId'],
project['title'],
))
|
<commit_before><commit_msg>Add script for inserting project into DB<commit_after>
|
#!/usr/bin/env python3
import pymongo
import subprocess
import re
from datetime import datetime
import argparse
from json import load as load_json
import sys
def _info(msg):
sys.stdout.write(msg + '\n')
sys.stdout.flush()
cl_parser = argparse.ArgumentParser(description='Insert a project into Meteor\'s local MongoDB')
cl_parser.add_argument('input', help='JSON input file')
cl_parser.add_argument('--site', default=None, help='Specify Meteor site (default: localhost)')
args = cl_parser.parse_args()
with open(args.input) as input_file:
json = load_json(input_file)
command = ['meteor', 'mongo', '-U']
if args.site:
command.append(args.site)
_info('Getting Mongo URL...')
mongo_url = subprocess.check_output(command).decode().strip()
mongo_url, db_name = mongo_url.rsplit('/', 1)
_info('Connecting to MongoDB: {} (DB: {})'.format(mongo_url, db_name))
client = pymongo.MongoClient(mongo_url)
db = client[db_name]
project = {
'created': datetime.utcnow(),
'owner': json['owner'],
'projectId': json['id'],
'tags': json['tags'],
'text': json['description'],
'title': json['title'],
'instructions': json['instructions'],
'pictures': json['pictures'],
'files': json['files'],
'license': json['license'],
}
db.projects.update({'owner': project['owner'], 'projectId': project['projectId']}, project,
upsert=True)
_info('Successfully inserted project \'{}/{}\' ({})'.format(
project['owner'],
project['projectId'],
project['title'],
))
|
Add script for inserting project into DB#!/usr/bin/env python3
import pymongo
import subprocess
import re
from datetime import datetime
import argparse
from json import load as load_json
import sys
def _info(msg):
sys.stdout.write(msg + '\n')
sys.stdout.flush()
cl_parser = argparse.ArgumentParser(description='Insert a project into Meteor\'s local MongoDB')
cl_parser.add_argument('input', help='JSON input file')
cl_parser.add_argument('--site', default=None, help='Specify Meteor site (default: localhost)')
args = cl_parser.parse_args()
with open(args.input) as input_file:
json = load_json(input_file)
command = ['meteor', 'mongo', '-U']
if args.site:
command.append(args.site)
_info('Getting Mongo URL...')
mongo_url = subprocess.check_output(command).decode().strip()
mongo_url, db_name = mongo_url.rsplit('/', 1)
_info('Connecting to MongoDB: {} (DB: {})'.format(mongo_url, db_name))
client = pymongo.MongoClient(mongo_url)
db = client[db_name]
project = {
'created': datetime.utcnow(),
'owner': json['owner'],
'projectId': json['id'],
'tags': json['tags'],
'text': json['description'],
'title': json['title'],
'instructions': json['instructions'],
'pictures': json['pictures'],
'files': json['files'],
'license': json['license'],
}
db.projects.update({'owner': project['owner'], 'projectId': project['projectId']}, project,
upsert=True)
_info('Successfully inserted project \'{}/{}\' ({})'.format(
project['owner'],
project['projectId'],
project['title'],
))
|
<commit_before><commit_msg>Add script for inserting project into DB<commit_after>#!/usr/bin/env python3
import pymongo
import subprocess
import re
from datetime import datetime
import argparse
from json import load as load_json
import sys
def _info(msg):
sys.stdout.write(msg + '\n')
sys.stdout.flush()
cl_parser = argparse.ArgumentParser(description='Insert a project into Meteor\'s local MongoDB')
cl_parser.add_argument('input', help='JSON input file')
cl_parser.add_argument('--site', default=None, help='Specify Meteor site (default: localhost)')
args = cl_parser.parse_args()
with open(args.input) as input_file:
json = load_json(input_file)
command = ['meteor', 'mongo', '-U']
if args.site:
command.append(args.site)
_info('Getting Mongo URL...')
mongo_url = subprocess.check_output(command).decode().strip()
mongo_url, db_name = mongo_url.rsplit('/', 1)
_info('Connecting to MongoDB: {} (DB: {})'.format(mongo_url, db_name))
client = pymongo.MongoClient(mongo_url)
db = client[db_name]
project = {
'created': datetime.utcnow(),
'owner': json['owner'],
'projectId': json['id'],
'tags': json['tags'],
'text': json['description'],
'title': json['title'],
'instructions': json['instructions'],
'pictures': json['pictures'],
'files': json['files'],
'license': json['license'],
}
db.projects.update({'owner': project['owner'], 'projectId': project['projectId']}, project,
upsert=True)
_info('Successfully inserted project \'{}/{}\' ({})'.format(
project['owner'],
project['projectId'],
project['title'],
))
|
|
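The script above leans on pymongo's legacy Collection.update(..., upsert=True), which was removed in pymongo 4. A minimal standalone illustration of that call shape; the connection string and document values are made up:
import pymongo
client = pymongo.MongoClient('mongodb://localhost:27017')  # assumed local instance
db = client['meteor']
db.projects.update(
    {'owner': 'alice', 'projectId': 'demo'},                   # match key
    {'owner': 'alice', 'projectId': 'demo', 'title': 'Demo'},  # replacement document
    upsert=True,  # insert when nothing matches
)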
8fe73523b7141f93d8523e56a7c6a5cc2ed82051
|
src/collectors/iodrivesnmp/test/testiodrivesnmp.py
|
src/collectors/iodrivesnmp/test/testiodrivesnmp.py
|
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from iodrivesnmp import IODriveSNMPCollector
class TestIODriveSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IODriveSNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = IODriveSNMPCollector(config, None)
def test_import(self):
self.assertTrue(IODriveSNMPCollector)
|
Test case for iodrivesnmp class
|
Test case for iodrivesnmp class
|
Python
|
mit
|
datafiniti/Diamond,stuartbfox/Diamond,joel-airspring/Diamond,disqus/Diamond,Netuitive/Diamond,metamx/Diamond,cannium/Diamond,hvnsweeting/Diamond,MichaelDoyle/Diamond,MediaMath/Diamond,Precis/Diamond,anandbhoraskar/Diamond,socialwareinc/Diamond,joel-airspring/Diamond,mfriedenhagen/Diamond,socialwareinc/Diamond,CYBERBUGJR/Diamond,tuenti/Diamond,Netuitive/netuitive-diamond,tellapart/Diamond,Ssawa/Diamond,metamx/Diamond,Ensighten/Diamond,codepython/Diamond,jaingaurav/Diamond,EzyInsights/Diamond,cannium/Diamond,h00dy/Diamond,mfriedenhagen/Diamond,h00dy/Diamond,dcsquared13/Diamond,timchenxiaoyu/Diamond,zoidbergwill/Diamond,TinLe/Diamond,thardie/Diamond,tellapart/Diamond,jumping/Diamond,Ensighten/Diamond,works-mobile/Diamond,ceph/Diamond,szibis/Diamond,hamelg/Diamond,Clever/Diamond,works-mobile/Diamond,eMerzh/Diamond-1,russss/Diamond,jumping/Diamond,MichaelDoyle/Diamond,mzupan/Diamond,ceph/Diamond,ceph/Diamond,Netuitive/Diamond,tusharmakkar08/Diamond,Ensighten/Diamond,h00dy/Diamond,EzyInsights/Diamond,clarkperkins/readthedocs.org — [unrecoverable wrap in source preserved as-is below] — datafiniti/Diamond,stuartbfox/Diamond,joel-airspring/Diamond,disqus/Diamond,Netuitive/Diamond,metamx/Diamond,cannium/Diamond,hvnsweeting/Diamond,MichaelDoyle/Diamond,MediaMath/Diamond,Precis/Diamond,anandbhoraskar/Diamond,socialwareinc/Diamond,bmhatfield/Diamond,szibis/Diamond,signalfx/Diamond,zoidbergwill/Diamond,mzupan/Diamond,bmhatfield/Diamond,janisz/Diamond-1,krbaker/Diamond,codepython/Diamond,TAKEALOT/Diamond,jumping/Diamond,CYBERBUGJR/Diamond,ramjothikumar/Diamond,hvnsweeting/Diamond,hamelg/Diamond,saucelabs/Diamond,Netuitive/netuitive-diamond,stuartbfox/Diamond,TinLe/Diamond,rtoma/Diamond,jaingaurav/Diamond,joel-airspring/Diamond,anandbhoraskar/Diamond,Precis/Diamond,russss/Diamond,Ssawa/Diamond,mfriedenhagen/Diamond,actmd/Diamond
|
Test case for iodrivesnmp class
|
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from iodrivesnmp import IODriveSNMPCollector
class TestIODriveSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IODriveSNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = IODriveSNMPCollector(config, None)
def test_import(self):
self.assertTrue(IODriveSNMPCollector)
|
<commit_before><commit_msg>Test case for iodrivesnmp class<commit_after>
|
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from iodrivesnmp import IODriveSNMPCollector
class TestIODriveSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IODriveSNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = IODriveSNMPCollector(config, None)
def test_import(self):
self.assertTrue(IODriveSNMPCollector)
|
Test case for iodrivesnmp class#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from iodrivesnmp import IODriveSNMPCollector
class TestIODriveSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IODriveSNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = IODriveSNMPCollector(config, None)
def test_import(self):
self.assertTrue(IODriveSNMPCollector)
|
<commit_before><commit_msg>Test case for iodrivesnmp class<commit_after>#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from iodrivesnmp import IODriveSNMPCollector
class TestIODriveSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IODriveSNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = IODriveSNMPCollector(config, None)
def test_import(self):
self.assertTrue(IODriveSNMPCollector)
|
|
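A note on the harness pattern in the record above: the setUp hook threads an allowed_names filter into the collector config before constructing the collector. Below is a minimal sketch of reusing that hook with a non-empty filter; the metric name, the assumed import path, and the final assertion are illustrative assumptions, not taken from Diamond's source.

import unittest

from test_iodrivesnmp import TestIODriveSNMPCollector  # assumed module name

class TestIODriveSNMPCollectorFiltered(TestIODriveSNMPCollector):
    def setUp(self):
        # 'iodrive_temperature' is a hypothetical metric name.
        super(TestIODriveSNMPCollectorFiltered, self).setUp(
            allowed_names=['iodrive_temperature'])

    def test_collector_built(self):
        # The base setUp stores the constructed collector on self.
        self.assertTrue(self.collector)

if __name__ == '__main__':
    unittest.main()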
7ec15caf8f2c9d0a21581261a356f6decc548061
|
test/ui_test.py
|
test/ui_test.py
|
from app import app
import unittest
class UiTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_index(self):
self.assertEqual(self.app.get('/').status_code, 200)
def test_no_page(self):
self.assertEqual(self.app.get('/missing-page').status_code, 200)
def test_all_pages(self):
self.assertEqual(self.app.get('/.all-pages').status_code, 200)
def test_edit(self):
self.assertEqual(self.app.get('/.edit/Index').status_code, 200)
self.assertEqual(self.app.get('/.edit/').status_code, 404)
|
Add some basic UI tests
|
Add some basic UI tests
|
Python
|
agpl-3.0
|
spacewiki/spacewiki,tdfischer/spacewiki,spacewiki/spacewiki,spacewiki/spacewiki,tdfischer/spacewiki,tdfischer/spacewiki,tdfischer/spacewiki,spacewiki/spacewiki
|
Add some basic UI tests
|
from app import app
import unittest
class UiTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_index(self):
self.assertEqual(self.app.get('/').status_code, 200)
def test_no_page(self):
self.assertEqual(self.app.get('/missing-page').status_code, 200)
def test_all_pages(self):
self.assertEqual(self.app.get('/.all-pages').status_code, 200)
def test_edit(self):
self.assertEqual(self.app.get('/.edit/Index').status_code, 200)
self.assertEqual(self.app.get('/.edit/').status_code, 404)
|
<commit_before><commit_msg>Add some basic UI tests<commit_after>
|
from app import app
import unittest
class UiTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_index(self):
self.assertEqual(self.app.get('/').status_code, 200)
def test_no_page(self):
self.assertEqual(self.app.get('/missing-page').status_code, 200)
def test_all_pages(self):
self.assertEqual(self.app.get('/.all-pages').status_code, 200)
def test_edit(self):
self.assertEqual(self.app.get('/.edit/Index').status_code, 200)
self.assertEqual(self.app.get('/.edit/').status_code, 404)
|
Add some basic UI testsfrom app import app
import unittest
class UiTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_index(self):
self.assertEqual(self.app.get('/').status_code, 200)
def test_no_page(self):
self.assertEqual(self.app.get('/missing-page').status_code, 200)
def test_all_pages(self):
self.assertEqual(self.app.get('/.all-pages').status_code, 200)
def test_edit(self):
self.assertEqual(self.app.get('/.edit/Index').status_code, 200)
self.assertEqual(self.app.get('/.edit/').status_code, 404)
|
<commit_before><commit_msg>Add some basic UI tests<commit_after>from app import app
import unittest
class UiTestCase(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
def test_index(self):
self.assertEqual(self.app.get('/').status_code, 200)
def test_no_page(self):
self.assertEqual(self.app.get('/missing-page').status_code, 200)
def test_all_pages(self):
self.assertEqual(self.app.get('/.all-pages').status_code, 200)
def test_edit(self):
self.assertEqual(self.app.get('/.edit/Index').status_code, 200)
self.assertEqual(self.app.get('/.edit/').status_code, 404)
|
|
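The record above asserts on status codes only. Here is a small sketch of also asserting on response content with the same Flask test client; the page text checked for is an assumption, not taken from the app.

from app import app
import unittest

class UiContentTestCase(unittest.TestCase):
    def setUp(self):
        self.app = app.test_client()

    def test_index_renders_html(self):
        # Hypothetical check: the index page should at least emit HTML.
        resp = self.app.get('/')
        self.assertEqual(resp.status_code, 200)
        self.assertIn(b'<html', resp.data.lower())

if __name__ == '__main__':
    unittest.main()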
7ddfb39256229aa8c985ed8d70a29479187c76ad
|
lily/management/commands/generate_beta_invites.py
|
lily/management/commands/generate_beta_invites.py
|
import csv
import gc
import logging
from datetime import date
from hashlib import sha256
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse_lazy
from lily.tenant.models import Tenant
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, **kwargs):
current_site = 'app.hellolily.com'
with default_storage.open('beta_signups_with_invites.csv', 'wb') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
spamwriter.writerow(['company', 'email', 'first_name', 'last_name', 'invite', 'country'])
for row in self.read_csvfile('beta_signups.csv'):
company = row['company']
first_name = row['first_name']
last_name = row['last_name']
email = row['email']
country = row['country']
date_string = date.today().strftime('%d%m%Y')
tenant = Tenant.objects.create(name=company, country=country)
call_command('create_tenant', tenant=tenant.id)
invite_hash = sha256('%s-%s-%s-%s' % (
tenant.id,
email,
date_string,
settings.SECRET_KEY
)).hexdigest()
invite_link = '%s://%s%s' % ('https', current_site, reverse_lazy('invitation_accept', kwargs={
'tenant_id': tenant.id,
'first_name': first_name,
'email': email,
'date': date_string,
'hash': invite_hash,
}))
spamwriter.writerow([company, email, first_name, last_name, invite_link, country])
gc.collect()
def read_csvfile(self, file_name):
"""
Read from path assuming it's a file with ';' separated values.
"""
# Newlines are breaking correct csv parsing. Write correct temporary file to parse.
csv_file = default_storage.open(file_name, 'rU')
reader = csv.DictReader(csv_file, delimiter=';', quoting=csv.QUOTE_ALL)
for row in reader:
yield row
|
Create script for beta invites
|
LILY-2366: Create script for beta invites
|
Python
|
agpl-3.0
|
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
|
LILY-2366: Create script for beta invites
|
import csv
import gc
import logging
from datetime import date
from hashlib import sha256
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse_lazy
from lily.tenant.models import Tenant
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, **kwargs):
current_site = 'app.hellolily.com'
with default_storage.open('beta_signups_with_invites.csv', 'wb') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
spamwriter.writerow(['company', 'email', 'first_name', 'last_name', 'invite', 'country'])
for row in self.read_csvfile('beta_signups.csv'):
company = row['company']
first_name = row['first_name']
last_name = row['last_name']
email = row['email']
country = row['country']
date_string = date.today().strftime('%d%m%Y')
tenant = Tenant.objects.create(name=company, country=country)
call_command('create_tenant', tenant=tenant.id)
invite_hash = sha256('%s-%s-%s-%s' % (
tenant.id,
email,
date_string,
settings.SECRET_KEY
)).hexdigest()
invite_link = '%s://%s%s' % ('https', current_site, reverse_lazy('invitation_accept', kwargs={
'tenant_id': tenant.id,
'first_name': first_name,
'email': email,
'date': date_string,
'hash': invite_hash,
}))
spamwriter.writerow([company, email, first_name, last_name, invite_link, country])
gc.collect()
def read_csvfile(self, file_name):
"""
Read from path assuming it's a file with ';' separated values.
"""
# Newlines are breaking correct csv parsing. Write correct temporary file to parse.
csv_file = default_storage.open(file_name, 'rU')
reader = csv.DictReader(csv_file, delimiter=';', quoting=csv.QUOTE_ALL)
for row in reader:
yield row
|
<commit_before><commit_msg>LILY-2366: Create script for beta invites<commit_after>
|
import csv
import gc
import logging
from datetime import date
from hashlib import sha256
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse_lazy
from lily.tenant.models import Tenant
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, **kwargs):
current_site = 'app.hellolily.com'
with default_storage.open('beta_signups_with_invites.csv', 'wb') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
spamwriter.writerow(['company', 'email', 'first_name', 'last_name', 'invite', 'country'])
for row in self.read_csvfile('beta_signups.csv'):
company = row['company']
first_name = row['first_name']
last_name = row['last_name']
email = row['email']
country = row['country']
date_string = date.today().strftime('%d%m%Y')
tenant = Tenant.objects.create(name=company, country=country)
call_command('create_tenant', tenant=tenant.id)
invite_hash = sha256('%s-%s-%s-%s' % (
tenant.id,
email,
date_string,
settings.SECRET_KEY
)).hexdigest()
invite_link = '%s://%s%s' % ('https', current_site, reverse_lazy('invitation_accept', kwargs={
'tenant_id': tenant.id,
'first_name': first_name,
'email': email,
'date': date_string,
'hash': invite_hash,
}))
spamwriter.writerow([company, email, first_name, last_name, invite_link, country])
gc.collect()
def read_csvfile(self, file_name):
"""
Read from path assuming it's a file with ';' separated values.
"""
# Newlines are breaking correct csv parsing. Write correct temporary file to parse.
csv_file = default_storage.open(file_name, 'rU')
reader = csv.DictReader(csv_file, delimiter=';', quoting=csv.QUOTE_ALL)
for row in reader:
yield row
|
LILY-2366: Create script for beta invitesimport csv
import gc
import logging
from datetime import date
from hashlib import sha256
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse_lazy
from lily.tenant.models import Tenant
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, **kwargs):
current_site = 'app.hellolily.com'
with default_storage.open('beta_signups_with_invites.csv', 'wb') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
spamwriter.writerow(['company', 'email', 'first_name', 'last_name', 'invite', 'country'])
for row in self.read_csvfile('beta_signups.csv'):
company = row['company']
first_name = row['first_name']
last_name = row['last_name']
email = row['email']
country = row['country']
date_string = date.today().strftime('%d%m%Y')
tenant = Tenant.objects.create(name=company, country=country)
call_command('create_tenant', tenant=tenant.id)
invite_hash = sha256('%s-%s-%s-%s' % (
tenant.id,
email,
date_string,
settings.SECRET_KEY
)).hexdigest()
invite_link = '%s://%s%s' % ('https', current_site, reverse_lazy('invitation_accept', kwargs={
'tenant_id': tenant.id,
'first_name': first_name,
'email': email,
'date': date_string,
'hash': invite_hash,
}))
spamwriter.writerow([company, email, first_name, last_name, invite_link, country])
gc.collect()
def read_csvfile(self, file_name):
"""
Read from path assuming it's a file with ';' separated values.
"""
# Newlines are breaking correct csv parsing. Write correct temporary file to parse.
csv_file = default_storage.open(file_name, 'rU')
reader = csv.DictReader(csv_file, delimiter=';', quoting=csv.QUOTE_ALL)
for row in reader:
yield row
|
<commit_before><commit_msg>LILY-2366: Create script for beta invites<commit_after>import csv
import gc
import logging
from datetime import date
from hashlib import sha256
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse_lazy
from lily.tenant.models import Tenant
logger = logging.getLogger(__name__)
class Command(BaseCommand):
def handle(self, **kwargs):
current_site = 'app.hellolily.com'
with default_storage.open('beta_signups_with_invites.csv', 'wb') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
spamwriter.writerow(['company', 'email', 'first_name', 'last_name', 'invite', 'country'])
for row in self.read_csvfile('beta_signups.csv'):
company = row['company']
first_name = row['first_name']
last_name = row['last_name']
email = row['email']
country = row['country']
date_string = date.today().strftime('%d%m%Y')
tenant = Tenant.objects.create(name=company, country=country)
call_command('create_tenant', tenant=tenant.id)
invite_hash = sha256('%s-%s-%s-%s' % (
tenant.id,
email,
date_string,
settings.SECRET_KEY
)).hexdigest()
invite_link = '%s://%s%s' % ('https', current_site, reverse_lazy('invitation_accept', kwargs={
'tenant_id': tenant.id,
'first_name': first_name,
'email': email,
'date': date_string,
'hash': invite_hash,
}))
spamwriter.writerow([company, email, first_name, last_name, invite_link, country])
gc.collect()
def read_csvfile(self, file_name):
"""
Read from path assuming it's a file with ';' separated values.
"""
# Newlines are breaking correct csv parsing. Write correct temporary file to parse.
csv_file = default_storage.open(file_name, 'rU')
reader = csv.DictReader(csv_file, delimiter=';', quoting=csv.QUOTE_ALL)
for row in reader:
yield row
|
|
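The command above signs each invite link with a sha256 digest over tenant id, email, date string, and the Django secret key. A minimal sketch of the matching verification step follows; the function name and call site are assumptions, only the digest construction is taken from the record.

import hmac
from hashlib import sha256

from django.conf import settings

def invite_hash_is_valid(tenant_id, email, date_string, candidate):
    # Recompute the digest exactly as generate_beta_invites builds it;
    # compare_digest avoids timing side channels on the comparison.
    expected = sha256('%s-%s-%s-%s' % (
        tenant_id, email, date_string, settings.SECRET_KEY)).hexdigest()
    return hmac.compare_digest(expected, candidate)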
5bc089a98bf578fd0c56e3e50cf76888ee74aba2
|
py/complex-number-multiplication.py
|
py/complex-number-multiplication.py
|
import re
class Solution(object):
def complexNumberMultiply(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
pat = re.compile(r'(-?\d+)\+(-?\d+)i')
mata = pat.match(a)
matb = pat.match(b)
a = int(mata.group(1)), int(mata.group(2))
b = int(matb.group(1)), int(matb.group(2))
ans = a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]
return '%d+%di' % ans
|
Add py solution for 537. Complex Number Multiplication
|
Add py solution for 537. Complex Number Multiplication
537. Complex Number Multiplication: https://leetcode.com/problems/complex-number-multiplication/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 537. Complex Number Multiplication
537. Complex Number Multiplication: https://leetcode.com/problems/complex-number-multiplication/
|
import re
class Solution(object):
def complexNumberMultiply(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
pat = re.compile(r'(-?\d+)\+(-?\d+)i')
mata = pat.match(a)
matb = pat.match(b)
a = int(mata.group(1)), int(mata.group(2))
b = int(matb.group(1)), int(matb.group(2))
ans = a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]
return '%d+%di' % ans
|
<commit_before><commit_msg>Add py solution for 537. Complex Number Multiplication
537. Complex Number Multiplication: https://leetcode.com/problems/complex-number-multiplication/<commit_after>
|
import re
class Solution(object):
def complexNumberMultiply(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
pat = re.compile(r'(-?\d+)\+(-?\d+)i')
mata = pat.match(a)
matb = pat.match(b)
a = int(mata.group(1)), int(mata.group(2))
b = int(matb.group(1)), int(matb.group(2))
ans = a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]
return '%d+%di' % ans
|
Add py solution for 537. Complex Number Multiplication
537. Complex Number Multiplication: https://leetcode.com/problems/complex-number-multiplication/import re
class Solution(object):
def complexNumberMultiply(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
pat = re.compile(r'(-?\d+)\+(-?\d+)i')
mata = pat.match(a)
matb = pat.match(b)
a = int(mata.group(1)), int(mata.group(2))
b = int(matb.group(1)), int(matb.group(2))
ans = a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]
return '%d+%di' % ans
|
<commit_before><commit_msg>Add py solution for 537. Complex Number Multiplication
537. Complex Number Multiplication: https://leetcode.com/problems/complex-number-multiplication/<commit_after>import re
class Solution(object):
def complexNumberMultiply(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
pat = re.compile(r'(-?\d+)\+(-?\d+)i')
mata = pat.match(a)
matb = pat.match(b)
a = int(mata.group(1)), int(mata.group(2))
b = int(matb.group(1)), int(matb.group(2))
ans = a[0] * b[0] - a[1] * b[1], a[1] * b[0] + a[0] * b[1]
return '%d+%di' % ans
|
|
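Python's built-in complex type gives a handy cross-check for the regex solution above, once the imaginary suffix 'i' is mapped to Python's 'j'. A short sketch (the helper is illustrative, not part of the submission):

def complex_multiply_builtin(a, b):
    # Normalize '+-' to '-' so strings like '1+-1i' parse, then let the
    # builtin complex type do the arithmetic.
    to_py = lambda s: complex(s.replace('i', 'j').replace('+-', '-'))
    prod = to_py(a) * to_py(b)
    return '%d+%di' % (int(prod.real), int(prod.imag))

assert complex_multiply_builtin('1+1i', '1+1i') == '0+2i'
assert complex_multiply_builtin('1+-1i', '1+-1i') == '0+-2i'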
06e82c471afa83bf0f08f0779b32dd8a09b8d1ba
|
py/intersection-of-two-arrays-ii.py
|
py/intersection-of-two-arrays-ii.py
|
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1, c2 = Counter(nums1), Counter(nums2)
return list((c1 & c2).elements())
|
Add py solution for 350. Intersection of Two Arrays II
|
Add py solution for 350. Intersection of Two Arrays II
350. Intersection of Two Arrays II: https://leetcode.com/problems/intersection-of-two-arrays-ii/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 350. Intersection of Two Arrays II
350. Intersection of Two Arrays II: https://leetcode.com/problems/intersection-of-two-arrays-ii/
|
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1, c2 = Counter(nums1), Counter(nums2)
return list((c1 & c2).elements())
|
<commit_before><commit_msg>Add py solution for 350. Intersection of Two Arrays II
350. Intersection of Two Arrays II: https://leetcode.com/problems/intersection-of-two-arrays-ii/<commit_after>
|
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1, c2 = Counter(nums1), Counter(nums2)
return list((c1 & c2).elements())
|
Add py solution for 350. Intersection of Two Arrays II
350. Intersection of Two Arrays II: https://leetcode.com/problems/intersection-of-two-arrays-ii/from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1, c2 = Counter(nums1), Counter(nums2)
return list((c1 & c2).elements())
|
<commit_before><commit_msg>Add py solution for 350. Intersection of Two Arrays II
350. Intersection of Two Arrays II: https://leetcode.com/problems/intersection-of-two-arrays-ii/<commit_after>from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1, c2 = Counter(nums1), Counter(nums2)
return list((c1 & c2).elements())
|
|
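Counter intersection keeps the minimum multiplicity per element, which is exactly the multiset semantics the problem asks for. A quick usage sketch:

from collections import Counter

# Counter & Counter keeps min(count1, count2) for each shared key.
c1 = Counter([1, 2, 2, 1])
c2 = Counter([2, 2])
print(sorted((c1 & c2).elements()))  # [2, 2]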
9d7c348170fc0f9d339a2ef57a9e64b1ceaa7516
|
web/whim/core/scrapers/mnh.py
|
web/whim/core/scrapers/mnh.py
|
from datetime import datetime, timezone, time
import requests
from bs4 import BeautifulSoup
from django.db import transaction
from .base import BaseScraper
from .exceptions import ScraperException
from whim.core.models import Event, Source, Category
from whim.core.utils import get_object_or_none
from whim.core.time import zero_time_with_timezone
class MNHScraper(BaseScraper):
def get_data(self):
url = "https://manxnationalheritage.im/whats-on/"
parsed = []
page = requests.get(url)
if page.status_code == 200:
soup = BeautifulSoup(page.content, 'html.parser')
events = soup.select(
"div.columns.no-padding-grid.push-top-m > div > a")
parsed = []
for e in events:
tmp = {
"link": e.get('href'),
"category": e.find("span", {"class": "badge"}).string
}
#get rest of data
article = e.find("div", {"class": "text"})
if article:
tmp["name"] = article.contents[0].string #h2
tmp["description"] = article.contents[3].contents[
0].string #p
#dates
try:
dates = article.contents[2].contents[0].string.replace(
" ", "").replace("–", "-").split("-") #span
tmp["start_date"] = zero_time_with_timezone(
datetime.strptime(dates[0], "%d/%m/%Y"))
if len(dates) > 1:
tmp["end_date"] = zero_time_with_timezone(
datetime.strptime(dates[1], "%d/%m/%Y"))
except:
continue
parsed.append(tmp)
return parsed
else:
raise ScraperException("Unexpected status code")
@transaction.atomic
def run(self, source_id):
source = Source.objects.get(id=source_id)
for scraped_event in self.get_data():
event = get_object_or_none(
Event, source=source, name=scraped_event["name"])
if event is None:
category, _ = Category.objects.get_or_create_from_name(
scraped_event["category"])
Event.objects.create(
source=source,
category=category,
name=scraped_event["name"],
description=scraped_event["description"],
start_datetime=scraped_event["start_date"],
end_datetime=scraped_event.get("end_date"),
link=scraped_event["link"],
tags=[])
#mark this run
source.last_run_date = datetime.now(timezone.utc)
source.save()
|
Add demo MNH event scraper
|
Add demo MNH event scraper
|
Python
|
mit
|
andrewgleave/whim,andrewgleave/whim,andrewgleave/whim
|
Add demo MNH event scraper
|
from datetime import datetime, timezone, time
import requests
from bs4 import BeautifulSoup
from django.db import transaction
from .base import BaseScraper
from .exceptions import ScraperException
from whim.core.models import Event, Source, Category
from whim.core.utils import get_object_or_none
from whim.core.time import zero_time_with_timezone
class MNHScraper(BaseScraper):
def get_data(self):
url = "https://manxnationalheritage.im/whats-on/"
parsed = []
page = requests.get(url)
if page.status_code == 200:
soup = BeautifulSoup(page.content, 'html.parser')
events = soup.select(
"div.columns.no-padding-grid.push-top-m > div > a")
parsed = []
for e in events:
tmp = {
"link": e.get('href'),
"category": e.find("span", {"class": "badge"}).string
}
#get rest of data
article = e.find("div", {"class": "text"})
if article:
tmp["name"] = article.contents[0].string #h2
tmp["description"] = article.contents[3].contents[
0].string #p
#dates
try:
dates = article.contents[2].contents[0].string.replace(
" ", "").replace("–", "-").split("-") #span
tmp["start_date"] = zero_time_with_timezone(
datetime.strptime(dates[0], "%d/%m/%Y"))
if len(dates) > 1:
tmp["end_date"] = zero_time_with_timezone(
datetime.strptime(dates[1], "%d/%m/%Y"))
except:
continue
parsed.append(tmp)
return parsed
else:
raise ScraperException("Unexpected status code")
@transaction.atomic
def run(self, source_id):
source = Source.objects.get(id=source_id)
for scraped_event in self.get_data():
event = get_object_or_none(
Event, source=source, name=scraped_event["name"])
if event is None:
category, _ = Category.objects.get_or_create_from_name(
scraped_event["category"])
Event.objects.create(
source=source,
category=category,
name=scraped_event["name"],
description=scraped_event["description"],
start_datetime=scraped_event["start_date"],
end_datetime=scraped_event.get("end_date"),
link=scraped_event["link"],
tags=[])
#mark this run
source.last_run_date = datetime.now(timezone.utc)
source.save()
|
<commit_before><commit_msg>Add demo MNH event scraper<commit_after>
|
from datetime import datetime, timezone, time
import requests
from bs4 import BeautifulSoup
from django.db import transaction
from .base import BaseScraper
from .exceptions import ScraperException
from whim.core.models import Event, Source, Category
from whim.core.utils import get_object_or_none
from whim.core.time import zero_time_with_timezone
class MNHScraper(BaseScraper):
def get_data(self):
url = "https://manxnationalheritage.im/whats-on/"
parsed = []
page = requests.get(url)
if page.status_code == 200:
soup = BeautifulSoup(page.content, 'html.parser')
events = soup.select(
"div.columns.no-padding-grid.push-top-m > div > a")
parsed = []
for e in events:
tmp = {
"link": e.get('href'),
"category": e.find("span", {"class": "badge"}).string
}
#get rest of data
article = e.find("div", {"class": "text"})
if article:
tmp["name"] = article.contents[0].string #h2
tmp["description"] = article.contents[3].contents[
0].string #p
#dates
try:
dates = article.contents[2].contents[0].string.replace(
" ", "").replace("–", "-").split("-") #span
tmp["start_date"] = zero_time_with_timezone(
datetime.strptime(dates[0], "%d/%m/%Y"))
if len(dates) > 1:
tmp["end_date"] = zero_time_with_timezone(
datetime.strptime(dates[1], "%d/%m/%Y"))
except:
continue
parsed.append(tmp)
return parsed
else:
raise ScraperException("Unexpected status code")
@transaction.atomic
def run(self, source_id):
source = Source.objects.get(id=source_id)
for scraped_event in self.get_data():
event = get_object_or_none(
Event, source=source, name=scraped_event["name"])
if event is None:
category, _ = Category.objects.get_or_create_from_name(
scraped_event["category"])
Event.objects.create(
source=source,
category=category,
name=scraped_event["name"],
description=scraped_event["description"],
start_datetime=scraped_event["start_date"],
end_datetime=scraped_event.get("end_date"),
link=scraped_event["link"],
tags=[])
#mark this run
source.last_run_date = datetime.now(timezone.utc)
source.save()
|
Add demo MNH event scraperfrom datetime import datetime, timezone, time
import requests
from bs4 import BeautifulSoup
from django.db import transaction
from .base import BaseScraper
from .exceptions import ScraperException
from whim.core.models import Event, Source, Category
from whim.core.utils import get_object_or_none
from whim.core.time import zero_time_with_timezone
class MNHScraper(BaseScraper):
def get_data(self):
url = "https://manxnationalheritage.im/whats-on/"
parsed = []
page = requests.get(url)
if page.status_code == 200:
soup = BeautifulSoup(page.content, 'html.parser')
events = soup.select(
"div.columns.no-padding-grid.push-top-m > div > a")
parsed = []
for e in events:
tmp = {
"link": e.get('href'),
"category": e.find("span", {"class": "badge"}).string
}
#get rest of data
article = e.find("div", {"class": "text"})
if article:
tmp["name"] = article.contents[0].string #h2
tmp["description"] = article.contents[3].contents[
0].string #p
#dates
try:
dates = article.contents[2].contents[0].string.replace(
" ", "").replace("–", "-").split("-") #span
tmp["start_date"] = zero_time_with_timezone(
datetime.strptime(dates[0], "%d/%m/%Y"))
if len(dates) > 1:
tmp["end_date"] = zero_time_with_timezone(
datetime.strptime(dates[1], "%d/%m/%Y"))
except:
continue
parsed.append(tmp)
return parsed
else:
raise ScraperException("Unexpected status code")
@transaction.atomic
def run(self, source_id):
source = Source.objects.get(id=source_id)
for scraped_event in self.get_data():
event = get_object_or_none(
Event, source=source, name=scraped_event["name"])
if event is None:
category, _ = Category.objects.get_or_create_from_name(
scraped_event["category"])
Event.objects.create(
source=source,
category=category,
name=scraped_event["name"],
description=scraped_event["description"],
start_datetime=scraped_event["start_date"],
end_datetime=scraped_event.get("end_date"),
link=scraped_event["link"],
tags=[])
#mark this run
source.last_run_date = datetime.now(timezone.utc)
source.save()
|
<commit_before><commit_msg>Add demo MNH event scraper<commit_after>from datetime import datetime, timezone, time
import requests
from bs4 import BeautifulSoup
from django.db import transaction
from .base import BaseScraper
from .exceptions import ScraperException
from whim.core.models import Event, Source, Category
from whim.core.utils import get_object_or_none
from whim.core.time import zero_time_with_timezone
class MNHScraper(BaseScraper):
def get_data(self):
url = "https://manxnationalheritage.im/whats-on/"
parsed = []
page = requests.get(url)
if page.status_code == 200:
soup = BeautifulSoup(page.content, 'html.parser')
events = soup.select(
"div.columns.no-padding-grid.push-top-m > div > a")
parsed = []
for e in events:
tmp = {
"link": e.get('href'),
"category": e.find("span", {"class": "badge"}).string
}
#get rest of data
article = e.find("div", {"class": "text"})
if article:
tmp["name"] = article.contents[0].string #h2
tmp["description"] = article.contents[3].contents[
0].string #p
#dates
try:
dates = article.contents[2].contents[0].string.replace(
" ", "").replace("–", "-").split("-") #span
tmp["start_date"] = zero_time_with_timezone(
datetime.strptime(dates[0], "%d/%m/%Y"))
if len(dates) > 1:
tmp["end_date"] = zero_time_with_timezone(
datetime.strptime(dates[1], "%d/%m/%Y"))
except:
continue
parsed.append(tmp)
return parsed
else:
raise ScraperException("Unexpected status code")
@transaction.atomic
def run(self, source_id):
source = Source.objects.get(id=source_id)
for scraped_event in self.get_data():
event = get_object_or_none(
Event, source=source, name=scraped_event["name"])
if event is None:
category, _ = Category.objects.get_or_create_from_name(
scraped_event["category"])
Event.objects.create(
source=source,
category=category,
name=scraped_event["name"],
description=scraped_event["description"],
start_datetime=scraped_event["start_date"],
end_datetime=scraped_event.get("end_date"),
link=scraped_event["link"],
tags=[])
#mark this run
source.last_run_date = datetime.now(timezone.utc)
source.save()
|
|
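The scraper above swallows every exception while parsing the date span and silently skips the event. A hedged sketch of the same parsing pulled into a helper that keeps the normalisation but raises ValueError on malformed input (the helper name is an assumption):

from datetime import datetime

def parse_date_span(text):
    # Same normalisation as the scraper: strip spaces, map the en dash
    # to '-', then split into start/end parts.
    parts = text.replace(" ", "").replace("\u2013", "-").split("-")
    start = datetime.strptime(parts[0], "%d/%m/%Y")
    end = datetime.strptime(parts[1], "%d/%m/%Y") if len(parts) > 1 else None
    return start, end

assert parse_date_span("01/02/2018 \u2013 03/02/2018")[1] is not None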
84990a4ef20c2e0f42133ed06ade5ce2d4e98ae3
|
chmvh_website/team/models.py
|
chmvh_website/team/models.py
|
from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
Save team member picture with extension.
|
Save team member picture with extension.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
Save team member picture with extension.
|
import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
<commit_before>from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
<commit_msg>Save team member picture with extension.<commit_after>
|
import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
Save team member picture with extension.import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
<commit_before>from django.db import models
def team_member_image_name(instance, filename):
return 'team/{0}'.format(instance.name)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
<commit_msg>Save team member picture with extension.<commit_after>import os
from django.db import models
def team_member_image_name(instance, filename):
_, ext = os.path.splitext(filename)
return 'team/{0}{1}'.format(instance.name, ext)
class TeamMember(models.Model):
bio = models.TextField(
verbose_name='biography')
name = models.CharField(
max_length=50,
unique=True,
verbose_name='name')
picture = models.ImageField(
blank=True,
null=True,
upload_to=team_member_image_name)
def __str__(self):
"""Return the team member's name"""
return self.name
|
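The fixed upload_to callable now keeps the uploaded file's extension. A quick sketch of what it yields; the fake instance is an illustrative stand-in for a TeamMember.

from collections import namedtuple

from team.models import team_member_image_name

FakeMember = namedtuple('FakeMember', 'name')

# os.path.splitext('photo.jpg') -> ('photo', '.jpg'), so the extension
# survives the rename.
print(team_member_image_name(FakeMember(name='Jane Doe'), 'photo.jpg'))
# -> 'team/Jane Doe.jpg'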
257134bdaea7c250d5956c4095adf0b917b65aa6
|
database/dict_converters/event_details_converter.py
|
database/dict_converters/event_details_converter.py
|
from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections,
'district_points': event_details.district_points,
'rankings': event_details.renderable_rankings,
'stats': event_details.matchstats,
}
return event_details_dict
|
from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections if event_details else None,
'district_points': event_details.district_points if event_details else None,
'rankings': event_details.renderable_rankings if event_details else None,
'stats': event_details.matchstats if event_details else None,
}
return event_details_dict
|
Fix null case for event details
|
Fix null case for event details
|
Python
|
mit
|
verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance
|
from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections,
'district_points': event_details.district_points,
'rankings': event_details.renderable_rankings,
'stats': event_details.matchstats,
}
return event_details_dict
Fix null case for event details
|
from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections if event_details else None,
'district_points': event_details.district_points if event_details else None,
'rankings': event_details.renderable_rankings if event_details else None,
'stats': event_details.matchstats if event_details else None,
}
return event_details_dict
|
<commit_before>from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections,
'district_points': event_details.district_points,
'rankings': event_details.renderable_rankings,
'stats': event_details.matchstats,
}
return event_details_dict
<commit_msg>Fix null case for event details<commit_after>
|
from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections if event_details else None,
'district_points': event_details.district_points if event_details else None,
'rankings': event_details.renderable_rankings if event_details else None,
'stats': event_details.matchstats if event_details else None,
}
return event_details_dict
|
from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections,
'district_points': event_details.district_points,
'rankings': event_details.renderable_rankings,
'stats': event_details.matchstats,
}
return event_details_dict
Fix null case for event detailsfrom database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections if event_details else None,
'district_points': event_details.district_points if event_details else None,
'rankings': event_details.renderable_rankings if event_details else None,
'stats': event_details.matchstats if event_details else None,
}
return event_details_dict
|
<commit_before>from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections,
'district_points': event_details.district_points,
'rankings': event_details.renderable_rankings,
'stats': event_details.matchstats,
}
return event_details_dict
<commit_msg>Fix null case for event details<commit_after>from database.dict_converters.converter_base import ConverterBase
class EventDetailsConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 0,
}
@classmethod
def convert(cls, event_details, dict_version):
CONVERTERS = {
3: cls.eventDetailsConverter_v3,
}
return CONVERTERS[dict_version](event_details)
@classmethod
def eventDetailsConverter_v3(cls, event_details):
event_details_dict = {
'alliances': event_details.alliance_selections if event_details else None,
'district_points': event_details.district_points if event_details else None,
'rankings': event_details.renderable_rankings if event_details else None,
'stats': event_details.matchstats if event_details else None,
}
return event_details_dict
|
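The fix guards each field on a possibly-None event_details. An equivalent sketch that hoists the None check once instead of repeating it per field (a style alternative with hypothetical naming, not the project's code):

def event_details_to_dict_v3(event_details):
    keys = ('alliances', 'district_points', 'rankings', 'stats')
    if event_details is None:
        # Same output shape as the guarded dict: every value None.
        return dict.fromkeys(keys)
    return {
        'alliances': event_details.alliance_selections,
        'district_points': event_details.district_points,
        'rankings': event_details.renderable_rankings,
        'stats': event_details.matchstats,
    }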
03279bbc6193d3944dcd2542daa65701a1e0eded
|
euler026.py
|
euler026.py
|
#!/usr/bin/python
"""
To solve this, we have to find the maximum
Full Reptend Prime in the given limit. To do that, we need
to check if 10 is a primitive root of p.
See http://mathworld.wolfram.com/FullReptendPrime.html for details
"""
from sys import exit
for p in range(999, 7, -2):
for k in range(1, p):
if (10 ** k) % p == 1:
if k != p - 1:
break
else:
print(p)
exit(0)
|
Add solution for problem 26
|
Add solution for problem 26
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 26
|
#!/usr/bin/python
"""
To solve this, we have to find the maximum
Full Reptend Prime in the given limit. To do that, we need
to check if 10 is a primitive root of p.
See http://mathworld.wolfram.com/FullReptendPrime.html for details
"""
from sys import exit
for p in range(999, 7, -2):
for k in range(1, p):
if (10 ** k) % p == 1:
if k != p - 1:
break
else:
print(p)
exit(0)
|
<commit_before><commit_msg>Add solution for problem 26<commit_after>
|
#!/usr/bin/python
"""
To solve this, we have to find the maximum
Full Reptend Prime in the given limit. To do that, we need
to check if 10 is a primitive root of p.
See http://mathworld.wolfram.com/FullReptendPrime.html for details
"""
from sys import exit
for p in range(999, 7, -2):
for k in range(1, p):
if (10 ** k) % p == 1:
if k != p - 1:
break
else:
print(p)
exit(0)
|
Add solution for problem 26#!/usr/bin/python
"""
To solve this, we have to find the maximum
Full Reptend Prime in the given limit. To do that, we need
to check if 10 is a primitive root of p.
See http://mathworld.wolfram.com/FullReptendPrime.html for details
"""
from sys import exit
for p in range(999, 7, -2):
for k in range(1, p):
if (10 ** k) % p == 1:
if k != p - 1:
break
else:
print(p)
exit(0)
|
<commit_before><commit_msg>Add solution for problem 26<commit_after>#!/usr/bin/python
"""
To solve this, we have to find the maximum
Full Reptend Prime in the given limit. To do that, we need
to check if 10 is a primitive root of p.
See http://mathworld.wolfram.com/FullReptendPrime.html for details
"""
from sys import exit
for p in range(999, 7, -2):
for k in range(1, p):
if (10 ** k) % p == 1:
if k != p - 1:
break
else:
print(p)
exit(0)
|
|
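The inner loop above checks that the multiplicative order of 10 mod p equals p - 1, i.e. that 10 is a primitive root of p. A short sketch making that order computation explicit, verified against 7, whose reciprocal 1/7 = 0.(142857) has the full period of 6 digits:

def order_of_10(p):
    # Smallest k with 10**k % p == 1; assumes gcd(10, p) == 1.
    k, r = 1, 10 % p
    while r != 1:
        r = (r * 10) % p
        k += 1
    return k

assert order_of_10(7) == 6  # full reptend: period equals p - 1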
930a8b1a7c980183df5469627a734033ca39a444
|
shade/tests/functional/test_image.py
|
shade/tests/functional/test_image.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_compute
----------------------------------
Functional tests for `shade` image methods.
"""
import tempfile
import uuid
from shade import openstack_cloud
from shade.tests import base
from shade.tests.functional.util import pick_image
class TestCompute(base.TestCase):
def setUp(self):
super(TestCompute, self).setUp()
# Shell should have OS-* envvars from openrc, typically loaded by job
self.cloud = openstack_cloud()
self.image = pick_image(self.cloud.nova_client.images.list())
def test_create_image(self):
test_image = tempfile.NamedTemporaryFile(delete=False)
test_image.write('\0' * 1024 * 1024)
test_image.close()
image_name = 'test-image-%s' % uuid.uuid4()
try:
self.cloud.create_image(name=image_name,
filename=test_image.name,
disk_format='raw',
container_format='bare',
wait=True)
finally:
self.cloud.delete_image(image_name, wait=True)
|
Add functional tests for create_image
|
Add functional tests for create_image
Change-Id: Iadb70ca764fbc2c8102a988d6e03cf623b6df48d
|
Python
|
apache-2.0
|
openstack-infra/shade,jsmartin/shade,dtroyer/python-openstacksdk,openstack/python-openstacksdk,stackforge/python-openstacksdk,openstack-infra/shade,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,jsmartin/shade,openstack/python-openstacksdk
|
Add functional tests for create_image
Change-Id: Iadb70ca764fbc2c8102a988d6e03cf623b6df48d
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_compute
----------------------------------
Functional tests for `shade` image methods.
"""
import tempfile
import uuid
from shade import openstack_cloud
from shade.tests import base
from shade.tests.functional.util import pick_image
class TestCompute(base.TestCase):
def setUp(self):
super(TestCompute, self).setUp()
# Shell should have OS-* envvars from openrc, typically loaded by job
self.cloud = openstack_cloud()
self.image = pick_image(self.cloud.nova_client.images.list())
def test_create_image(self):
test_image = tempfile.NamedTemporaryFile(delete=False)
test_image.write('\0' * 1024 * 1024)
test_image.close()
image_name = 'test-image-%s' % uuid.uuid4()
try:
self.cloud.create_image(name=image_name,
filename=test_image.name,
disk_format='raw',
container_format='bare',
wait=True)
finally:
self.cloud.delete_image(image_name, wait=True)
|
<commit_before><commit_msg>Add functional tests for create_image
Change-Id: Iadb70ca764fbc2c8102a988d6e03cf623b6df48d<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_compute
----------------------------------
Functional tests for `shade` image methods.
"""
import tempfile
import uuid
from shade import openstack_cloud
from shade.tests import base
from shade.tests.functional.util import pick_image
class TestCompute(base.TestCase):
def setUp(self):
super(TestCompute, self).setUp()
# Shell should have OS-* envvars from openrc, typically loaded by job
self.cloud = openstack_cloud()
self.image = pick_image(self.cloud.nova_client.images.list())
def test_create_image(self):
test_image = tempfile.NamedTemporaryFile(delete=False)
test_image.write('\0' * 1024 * 1024)
test_image.close()
image_name = 'test-image-%s' % uuid.uuid4()
try:
self.cloud.create_image(name=image_name,
filename=test_image.name,
disk_format='raw',
container_format='bare',
wait=True)
finally:
self.cloud.delete_image(image_name, wait=True)
|
Add functional tests for create_image
Change-Id: Iadb70ca764fbc2c8102a988d6e03cf623b6df48d# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_compute
----------------------------------
Functional tests for `shade` image methods.
"""
import tempfile
import uuid
from shade import openstack_cloud
from shade.tests import base
from shade.tests.functional.util import pick_image
class TestCompute(base.TestCase):
def setUp(self):
super(TestCompute, self).setUp()
# Shell should have OS-* envvars from openrc, typically loaded by job
self.cloud = openstack_cloud()
self.image = pick_image(self.cloud.nova_client.images.list())
def test_create_image(self):
test_image = tempfile.NamedTemporaryFile(delete=False)
test_image.write('\0' * 1024 * 1024)
test_image.close()
image_name = 'test-image-%s' % uuid.uuid4()
try:
self.cloud.create_image(name=image_name,
filename=test_image.name,
disk_format='raw',
container_format='bare',
wait=True)
finally:
self.cloud.delete_image(image_name, wait=True)
|
<commit_before><commit_msg>Add functional tests for create_image
Change-Id: Iadb70ca764fbc2c8102a988d6e03cf623b6df48d<commit_after># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_compute
----------------------------------
Functional tests for `shade` image methods.
"""
import tempfile
import uuid
from shade import openstack_cloud
from shade.tests import base
from shade.tests.functional.util import pick_image
class TestCompute(base.TestCase):
def setUp(self):
super(TestCompute, self).setUp()
# Shell should have OS-* envvars from openrc, typically loaded by job
self.cloud = openstack_cloud()
self.image = pick_image(self.cloud.nova_client.images.list())
def test_create_image(self):
test_image = tempfile.NamedTemporaryFile(delete=False)
test_image.write('\0' * 1024 * 1024)
test_image.close()
image_name = 'test-image-%s' % uuid.uuid4()
try:
self.cloud.create_image(name=image_name,
filename=test_image.name,
disk_format='raw',
container_format='bare',
wait=True)
finally:
self.cloud.delete_image(image_name, wait=True)
|
|
9c045f7667e1bdc6c9137c3877292907f4623774
|
make_a_plea/management/commands/check_urns_in_db.py
|
make_a_plea/management/commands/check_urns_in_db.py
|
import csv
from django.core.management.base import BaseCommand
from apps.plea.models import DataValidation, Case
from apps.plea.standardisers import standardise_urn, format_for_region
class Command(BaseCommand):
help = "Build weekly aggregate stats"
def add_arguments(self, parser):
parser.add_argument('csv_file', nargs='+')
def handle(self, *args, **options):
with open(options['csv_file'][0]) as csvfile:
total_matched, total_missed, matched, missed = 0, 0, 0, 0
for row in csvfile.readlines():
if not row.strip():
continue
elif row.startswith("#"):
if matched > 0 or missed > 0:
print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed)
total_matched += matched
total_missed += missed
matched = 0
missed = 0
print row
else:
urn = standardise_urn(row)
if Case.objects.filter(urn__iexact=urn).exists():
matched += 1
else:
missed += 1
print "{} - failed".format(urn)
print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
|
Add a management command to check if URNs are present in the database
|
Add a management command to check if URNs are present in the database
|
Python
|
mit
|
ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas
|
Add a management command to check if URNs are present in the database
|
import csv
from django.core.management.base import BaseCommand
from apps.plea.models import DataValidation, Case
from apps.plea.standardisers import standardise_urn, format_for_region
class Command(BaseCommand):
help = "Build weekly aggregate stats"
def add_arguments(self, parser):
parser.add_argument('csv_file', nargs='+')
def handle(self, *args, **options):
with open(options['csv_file'][0]) as csvfile:
total_matched, total_missed, matched, missed = 0, 0, 0, 0
for row in csvfile.readlines():
if not row.strip():
continue
elif row.startswith("#"):
if matched > 0 or missed > 0:
print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed)
total_matched += matched
total_missed += missed
matched = 0
missed = 0
print row
else:
urn = standardise_urn(row)
if Case.objects.filter(urn__iexact=urn).exists():
matched += 1
else:
missed += 1
print "{} - failed".format(urn)
print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
|
<commit_before><commit_msg>Add a management command to check if URNs are present in the database<commit_after>
|
import csv
from django.core.management.base import BaseCommand
from apps.plea.models import DataValidation, Case
from apps.plea.standardisers import standardise_urn, format_for_region
class Command(BaseCommand):
help = "Build weekly aggregate stats"
def add_arguments(self, parser):
parser.add_argument('csv_file', nargs='+')
def handle(self, *args, **options):
with open(options['csv_file'][0]) as csvfile:
total_matched, total_missed, matched, missed = 0, 0, 0, 0
for row in csvfile.readlines():
if not row.strip():
continue
elif row.startswith("#"):
if matched > 0 or missed > 0:
print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed)
total_matched += matched
total_missed += missed
matched = 0
missed = 0
print row
else:
urn = standardise_urn(row)
if Case.objects.filter(urn__iexact=urn).exists():
matched += 1
else:
missed += 1
print "{} - failed".format(urn)
print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
|
Add a management command to check if URNs are present in the databaseimport csv
from django.core.management.base import BaseCommand
from apps.plea.models import DataValidation, Case
from apps.plea.standardisers import standardise_urn, format_for_region
class Command(BaseCommand):
help = "Build weekly aggregate stats"
def add_arguments(self, parser):
parser.add_argument('csv_file', nargs='+')
def handle(self, *args, **options):
with open(options['csv_file'][0]) as csvfile:
total_matched, total_missed, matched, missed = 0, 0, 0, 0
for row in csvfile.readlines():
if not row.strip():
continue
elif row.startswith("#"):
if matched > 0 or missed > 0:
print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed)
total_matched += matched
total_missed += missed
matched = 0
missed = 0
print row
else:
urn = standardise_urn(row)
if Case.objects.filter(urn__iexact=urn).exists():
matched += 1
else:
missed += 1
print "{} - failed".format(urn)
print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
|
<commit_before><commit_msg>Add a management command to check if URNs are present in the database<commit_after>import csv
from django.core.management.base import BaseCommand
from apps.plea.models import DataValidation, Case
from apps.plea.standardisers import standardise_urn, format_for_region
class Command(BaseCommand):
help = "Build weekly aggregate stats"
def add_arguments(self, parser):
parser.add_argument('csv_file', nargs='+')
def handle(self, *args, **options):
with open(options['csv_file'][0]) as csvfile:
total_matched, total_missed, matched, missed = 0, 0, 0, 0
for row in csvfile.readlines():
if not row.strip():
continue
elif row.startswith("#"):
if matched > 0 or missed > 0:
print "----------------\nMatched {}\nMissed {}\n\n".format(matched, missed)
total_matched += matched
total_missed += missed
matched = 0
missed = 0
print row
else:
urn = standardise_urn(row)
if Case.objects.filter(urn__iexact=urn).exists():
matched += 1
else:
missed += 1
print "{} - failed".format(urn)
print "----------------\nTotal:\nMatched {}\nMissed {}".format(total_matched, total_missed)
|
|
d6e9971ceefc69f0eefc7440cc5e7035e7dcc05d
|
contentcuration/contentcuration/middleware/ErrorReportingMiddleware.py
|
contentcuration/contentcuration/middleware/ErrorReportingMiddleware.py
|
from google.cloud import error_reporting
class ErrorReportingMiddleware(object):
def __init__(self, *args, **kwargs):
self.client = error_reporting.Client()
def process_exception(self, request, exception):
self.client.report_exception()
|
Add the middleware for reporting errors to gcloud.
|
Add the middleware for reporting errors to gcloud.
|
Python
|
mit
|
fle-internal/content-curation,jonboiser/content-curation,jonboiser/content-curation,jayoshih/content-curation,aronasorman/content-curation,jayoshih/content-curation,fle-internal/content-curation,aronasorman/content-curation,jayoshih/content-curation,DXCanas/content-curation,jonboiser/content-curation,fle-internal/content-curation,DXCanas/content-curation,jonboiser/content-curation,fle-internal/content-curation,DXCanas/content-curation,DXCanas/content-curation,aronasorman/content-curation,jayoshih/content-curation
|
Add the middleware for reporting errors to gcloud.
|
from google.cloud import error_reporting
class ErrorReportingMiddleware(object):
def __init__(self, *args, **kwargs):
self.client = error_reporting.Client()
def process_exception(self, request, exception):
self.client.report_exception()
|
<commit_before><commit_msg>Add the middleware for reporting errors to gcloud.<commit_after>
|
from google.cloud import error_reporting
class ErrorReportingMiddleware(object):
def __init__(self, *args, **kwargs):
self.client = error_reporting.Client()
def process_exception(self, request, exception):
self.client.report_exception()
|
Add the middleware for reporting errors to gcloud.from google.cloud import error_reporting
class ErrorReportingMiddleware(object):
def __init__(self, *args, **kwargs):
self.client = error_reporting.Client()
def process_exception(self, request, exception):
self.client.report_exception()
|
<commit_before><commit_msg>Add the middleware for reporting errors to gcloud.<commit_after>from google.cloud import error_reporting
class ErrorReportingMiddleware(object):
def __init__(self, *args, **kwargs):
self.client = error_reporting.Client()
def process_exception(self, request, exception):
self.client.report_exception()
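# A hedged wiring sketch (assumption): this old-style middleware is activated
# from Django settings; the dotted path below is illustrative, derived from
# the file location rather than confirmed by the commit.
MIDDLEWARE_CLASSES = [
    "contentcuration.middleware.ErrorReportingMiddleware.ErrorReportingMiddleware",
    # ... the project's remaining middleware ...
]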
|
|
40f92e6293bb13ee1462b932be15f5f11ceeee74
|
compiler/infer.py
|
compiler/infer.py
|
"""
# ----------------------------------------------------------------------
# infer.py
#
# Type inference for Llama
# http://courses.softlab.ntua.gr/compilers/2012a/llama2012.pdf
#
# Authors: Nick Korasidis <renelvon@gmail.com>
# Dimitris Koutsoukos <dim.kou.shmmy@gmail.com>
# ----------------------------------------------------------------------
"""
class TempType:
"""A temporary type used during inference."""
_next_free = 1 # Next free papaki.
@classmethod
def _get_next_tag(cls):
cls._next_free += 1
return cls._next_free
def __init__(self, node, spec_type=None):
"""
Construct a new temporary type for node `node`.
The user may optionally supply a type for this node;
such a specification is not binding but will improve
error reporting.
"""
self._node = node
self._spec_type = spec_type
self._inferred_type = None
self._tag = self._get_next_tag()
def write_back(self):
self._node.type = self._inferred_type
# TODO: Validate the type before returning.
|
Add initial implementation of TempType.
|
Infer: Add initial implementation of TempType.
* This will be used to supply types to all type-bearing nodes during
type inference.
|
Python
|
mit
|
Renelvon/llama,Renelvon/llama
|
Infer: Add initial implementation of TempType.
* This will be used to supply types to all type-bearing nodes during
type inference.
|
"""
# ----------------------------------------------------------------------
# infer.py
#
# Type inference for Llama
# http://courses.softlab.ntua.gr/compilers/2012a/llama2012.pdf
#
# Authors: Nick Korasidis <renelvon@gmail.com>
# Dimitris Koutsoukos <dim.kou.shmmy@gmail.com>
# ----------------------------------------------------------------------
"""
class TempType:
"""A temporary type used during inference."""
_next_free = 1 # Next free papaki.
@classmethod
def _get_next_tag(cls):
cls._next_free += 1
return cls._next_free
def __init__(self, node, spec_type=None):
"""
Construct a new temporary type for node `node`.
The user may optionally supply a type for this node;
such a specification is not binding but will improve
error reporting.
"""
self._node = node
self._spec_type = spec_type
self._inferred_type = None
self._tag = self._get_next_tag()
def write_back(self):
self._node.type = self._inferred_type
# TODO: Validate the type before returning.
|
<commit_before><commit_msg>Infer: Add initial implementation of TempType.
* This will be used to supply types to all type-bearing nodes during
type inference.<commit_after>
|
"""
# ----------------------------------------------------------------------
# infer.py
#
# Type inference for Llama
# http://courses.softlab.ntua.gr/compilers/2012a/llama2012.pdf
#
# Authors: Nick Korasidis <renelvon@gmail.com>
# Dimitris Koutsoukos <dim.kou.shmmy@gmail.com>
# ----------------------------------------------------------------------
"""
class TempType:
"""A temporary type used during inference."""
_next_free = 1 # Next free papaki.
@classmethod
def _get_next_tag(cls):
cls._next_free += 1
return cls._next_free
def __init__(self, node, spec_type=None):
"""
Construct a new temporary type for node `node`.
The user may optionally supply a type for this node;
such a specification is not binding but will improve
error reporting.
"""
self._node = node
self._spec_type = spec_type
self._inferred_type = None
self._tag = self._get_next_tag()
def write_back(self):
self._node.type = self._inferred_type
# TODO: Validate the type before returning.
|
Infer: Add initial implementation of TempType.
* This will be used to supply types to all type-bearing nodes during
type inference."""
# ----------------------------------------------------------------------
# infer.py
#
# Type inference for Llama
# http://courses.softlab.ntua.gr/compilers/2012a/llama2012.pdf
#
# Authors: Nick Korasidis <renelvon@gmail.com>
# Dimitris Koutsoukos <dim.kou.shmmy@gmail.com>
# ----------------------------------------------------------------------
"""
class TempType:
"""A temporary type used during inference."""
_next_free = 1 # Next free papaki.
@classmethod
def _get_next_tag(cls):
cls._next_free += 1
return cls._next_free
def __init__(self, node, spec_type=None):
"""
Construct a new temporary type for node `node`.
The user may optionally supply a type for this node;
such a specification is not binding but will improve
error reporting.
"""
self._node = node
self._spec_type = spec_type
self._inferred_type = None
self._tag = self._get_next_tag()
def write_back(self):
self._node.type = self._inferred_type
# TODO: Validate the type before returning.
|
<commit_before><commit_msg>Infer: Add initial implementation of TempType.
* This will be used to supply types to all type-bearing nodes during
type inference.<commit_after>"""
# ----------------------------------------------------------------------
# infer.py
#
# Type inference for Llama
# http://courses.softlab.ntua.gr/compilers/2012a/llama2012.pdf
#
# Authors: Nick Korasidis <renelvon@gmail.com>
# Dimitris Koutsoukos <dim.kou.shmmy@gmail.com>
# ----------------------------------------------------------------------
"""
class TempType:
"""A temporary type used during inference."""
_next_free = 1 # Next free papaki.
@classmethod
def _get_next_tag(cls):
cls._next_free += 1
return cls._next_free
def __init__(self, node, spec_type=None):
"""
Construct a new temporary type for node `node`.
The user may optionally supply a type for this node;
such a specification is not binding but will improve
error reporting.
"""
self._node = node
self._spec_type = spec_type
self._inferred_type = None
self._tag = self._get_next_tag()
def write_back(self):
self._node.type = self._inferred_type
# TODO: Validate the type before returning.
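# A minimal usage sketch (assumption; a real Llama AST node is stubbed here,
# and _inferred_type is set by hand where the unifier normally would):
class _StubNode(object):
    type = None

_node = _StubNode()
_tmp = TempType(_node, spec_type="int")
_tmp._inferred_type = "int"
_tmp.write_back()
assert _node.type == "int"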
|
|
133da92ed69aafc6c0a8d4466cf3b0266c5edc68
|
userprofile/migrations/0006_auto_20180309_2215.py
|
userprofile/migrations/0006_auto_20180309_2215.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-09 22:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0005_auto_20171121_1923'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.ImageField(default=None, upload_to='profilepictures'),
),
]
|
Add migration for change in profile model.
|
Add migration for change in profile model.
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
Add migration for change in profile model.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-09 22:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0005_auto_20171121_1923'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.ImageField(default=None, upload_to='profilepictures'),
),
]
|
<commit_before><commit_msg>Add migration for change in profile model.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-09 22:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0005_auto_20171121_1923'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.ImageField(default=None, upload_to='profilepictures'),
),
]
|
Add migration for change in profile model.# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-09 22:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0005_auto_20171121_1923'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.ImageField(default=None, upload_to='profilepictures'),
),
]
|
<commit_before><commit_msg>Add migration for change in profile model.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-03-09 22:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userprofile', '0005_auto_20171121_1923'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.ImageField(default=None, upload_to='profilepictures'),
),
]
|
|
f3db6608c2b4afeb214c3f1b94e0175609ad0b88
|
cs4teachers/events/migrations/0018_auto_20170706_0803.py
|
cs4teachers/events/migrations/0018_auto_20170706_0803.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-06 08:03
from __future__ import unicode_literals
import autoslug.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0017_auto_20170705_0952'),
]
operations = [
migrations.AlterField(
model_name='event',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='location',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='resource',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='session',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique_with=['event__slug']),
),
migrations.AlterField(
model_name='thirdpartyevent',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
]
|
Add migration file for event slug changes
|
Add migration file for event slug changes
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
Add migration file for event slug changes
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-06 08:03
from __future__ import unicode_literals
import autoslug.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0017_auto_20170705_0952'),
]
operations = [
migrations.AlterField(
model_name='event',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='location',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='resource',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='session',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique_with=['event__slug']),
),
migrations.AlterField(
model_name='thirdpartyevent',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
]
|
<commit_before><commit_msg>Add migration file for event slug changes<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-06 08:03
from __future__ import unicode_literals
import autoslug.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0017_auto_20170705_0952'),
]
operations = [
migrations.AlterField(
model_name='event',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='location',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='resource',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='session',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique_with=['event__slug']),
),
migrations.AlterField(
model_name='thirdpartyevent',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
]
|
Add migration file for event slug changes# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-06 08:03
from __future__ import unicode_literals
import autoslug.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0017_auto_20170705_0952'),
]
operations = [
migrations.AlterField(
model_name='event',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='location',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='resource',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='session',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique_with=['event__slug']),
),
migrations.AlterField(
model_name='thirdpartyevent',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
]
|
<commit_before><commit_msg>Add migration file for event slug changes<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-06 08:03
from __future__ import unicode_literals
import autoslug.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0017_auto_20170705_0952'),
]
operations = [
migrations.AlterField(
model_name='event',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='location',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='resource',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
migrations.AlterField(
model_name='session',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name', unique_with=['event__slug']),
),
migrations.AlterField(
model_name='thirdpartyevent',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from='name'),
),
]
|
|
8cf5b328d7596a9b74490b7dfd4a1b8aa1577b55
|
accelerator/migrations/0110_remove_bucket_list_program_role_20220707_1001.py
|
accelerator/migrations/0110_remove_bucket_list_program_role_20220707_1001.py
|
from django.db import migrations
def remove_bucket_list_program_roles(apps, schema_editor):
BucketState = apps.get_model('accelerator', 'BucketState')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
ProgramRoleGrant = apps.get_model('accelerator', 'ProgramRoleGrant')
NodePublishedFor = apps.get_model('accelerator', 'NodePublishedFor')
program_role_ids = BucketState.objects.values_list('program_role_id',
flat=True)
NodePublishedFor.objects.filter(
published_for_id__in=program_role_ids).delete()
ProgramRoleGrant.objects.filter(
program_role_id__in=program_role_ids).delete()
BucketState.objects.all().delete()
ProgramRole.objects.filter(pk__in=program_role_ids).delete()
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0109_remove_interest_fields_20220705_0425'),
]
operations = [
migrations.RunPython(remove_bucket_list_program_roles,
migrations.RunPython.noop)
]
|
Merge remote-tracking branch 'origin' into AC-9512
|
[AC-9512] Merge remote-tracking branch 'origin' into AC-9512
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
[AC-9512] Merge remote-tracking branch 'origin' into AC-9512
|
from django.db import migrations
def remove_bucket_list_program_roles(apps, schema_editor):
BucketState = apps.get_model('accelerator', 'BucketState')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
ProgramRoleGrant = apps.get_model('accelerator', 'ProgramRoleGrant')
NodePublishedFor = apps.get_model('accelerator', 'NodePublishedFor')
program_role_ids = BucketState.objects.values_list('program_role_id',
flat=True)
NodePublishedFor.objects.filter(
published_for_id__in=program_role_ids).delete()
ProgramRoleGrant.objects.filter(
program_role_id__in=program_role_ids).delete()
BucketState.objects.all().delete()
ProgramRole.objects.filter(pk__in=program_role_ids).delete()
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0109_remove_interest_fields_20220705_0425'),
]
operations = [
migrations.RunPython(remove_bucket_list_program_roles,
migrations.RunPython.noop)
]
|
<commit_before><commit_msg>[AC-9512] Merge remote-tracking branch 'origin' into AC-9512<commit_after>
|
from django.db import migrations
def remove_bucket_list_program_roles(apps, schema_editor):
BucketState = apps.get_model('accelerator', 'BucketState')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
ProgramRoleGrant = apps.get_model('accelerator', 'ProgramRoleGrant')
NodePublishedFor = apps.get_model('accelerator', 'NodePublishedFor')
program_role_ids = BucketState.objects.values_list('program_role_id',
flat=True)
NodePublishedFor.objects.filter(
published_for_id__in=program_role_ids).delete()
ProgramRoleGrant.objects.filter(
program_role_id__in=program_role_ids).delete()
BucketState.objects.all().delete()
ProgramRole.objects.filter(pk__in=program_role_ids).delete()
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0109_remove_interest_fields_20220705_0425'),
]
operations = [
migrations.RunPython(remove_bucket_list_program_roles,
migrations.RunPython.noop)
]
|
[AC-9512] Merge remote-tracking branch 'origin' into AC-9512from django.db import migrations
def remove_bucket_list_program_roles(apps, schema_editor):
BucketState = apps.get_model('accelerator', 'BucketState')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
ProgramRoleGrant = apps.get_model('accelerator', 'ProgramRoleGrant')
NodePublishedFor = apps.get_model('accelerator', 'NodePublishedFor')
program_role_ids = BucketState.objects.values_list('program_role_id',
flat=True)
NodePublishedFor.objects.filter(
published_for_id__in=program_role_ids).delete()
ProgramRoleGrant.objects.filter(
program_role_id__in=program_role_ids).delete()
BucketState.objects.all().delete()
ProgramRole.objects.filter(pk__in=program_role_ids).delete()
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0109_remove_interest_fields_20220705_0425'),
]
operations = [
migrations.RunPython(remove_bucket_list_program_roles,
migrations.RunPython.noop)
]
|
<commit_before><commit_msg>[AC-9512] Merge remote-tracking branch 'origin' into AC-9512<commit_after>from django.db import migrations
def remove_bucket_list_program_roles(apps, schema_editor):
BucketState = apps.get_model('accelerator', 'BucketState')
ProgramRole = apps.get_model('accelerator', 'ProgramRole')
ProgramRoleGrant = apps.get_model('accelerator', 'ProgramRoleGrant')
NodePublishedFor = apps.get_model('accelerator', 'NodePublishedFor')
program_role_ids = BucketState.objects.values_list('program_role_id',
flat=True)
NodePublishedFor.objects.filter(
published_for_id__in=program_role_ids).delete()
ProgramRoleGrant.objects.filter(
program_role_id__in=program_role_ids).delete()
BucketState.objects.all().delete()
ProgramRole.objects.filter(pk__in=program_role_ids).delete()
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0109_remove_interest_fields_20220705_0425'),
]
operations = [
migrations.RunPython(remove_bucket_list_program_roles,
migrations.RunPython.noop)
]
|
|
98295608a2ba4519d12212532380253bba4372ed
|
scripts/frequency_analysis.py
|
scripts/frequency_analysis.py
|
import asyncio
import attr
import pprint
import dateutil.parser
from datetime import timedelta
from bobsled.core import bobsled
from bobsled.base import Status
def recommend_frequency_for_task(runs):
total_duration = timedelta(seconds=0)
longest_duration = timedelta(seconds=0)
for run in runs:
start = dateutil.parser.parse(run.start)
end = dateutil.parser.parse(run.end)
duration = end - start
total_duration += duration
if duration > longest_duration:
longest_duration = duration
average = total_duration / len(runs)
if longest_duration.total_seconds() <= 60*10:
return '0 */2 * * ?'
elif longest_duration.total_seconds() <= 60*60:
return '0 */6 * * ?'
else:
return 'daily'
async def analyze_frequency():
await bobsled.initialize()
tasks = [attr.asdict(t) for t in await bobsled.storage.get_tasks()]
results = await asyncio.gather(
*[bobsled.run.get_runs(task_name=t["name"], latest=4) for t in tasks]
)
recommendations = []
for task, latest_runs in zip(tasks, results):
# make recommendations for scrape tasks that have runs
if latest_runs and '-scrape' in task['name']:
if all(run.status is Status.Success for run in latest_runs):
recommendation = recommend_frequency_for_task(latest_runs)
else:
# a recent run failed, so make a note of that
recommendation = 'n/a - at least one recent task failed'
recommendations.append({
'task': task['name'],
'current_schedule': task['triggers'][0]['cron'],
'recommended': recommendation
})
changed_recommendations = []
for recommendation in recommendations:
if recommendation['recommended'] != 'daily' and 'n/a' not in recommendation['recommended']\
and recommendation['current_schedule'] != recommendation['recommended']:
changed_recommendations.append(recommendation)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(changed_recommendations)
def main():
# asyncio.run(bobsled.initialize())  # initializing here causes a threading problem
asyncio.run(analyze_frequency())
if __name__ == "__main__":
main()
|
Add script that recommends scrape task schedule based on recent run timings
|
Add script that recommends scrape task schedule based on recent run timings
|
Python
|
mit
|
openstates/bobsled,openstates/bobsled,openstates/bobsled,openstates/bobsled
|
Add script that recommends scrape task schedule based on recent run timings
|
import asyncio
import attr
import pprint
import dateutil.parser
from datetime import timedelta
from bobsled.core import bobsled
from bobsled.base import Status
def recommend_frequency_for_task(runs):
total_duration = timedelta(seconds=0)
longest_duration = timedelta(seconds=0)
for run in runs:
start = dateutil.parser.parse(run.start)
end = dateutil.parser.parse(run.end)
duration = end - start
total_duration += duration
if duration > longest_duration:
longest_duration = duration
average = total_duration / len(runs)
if longest_duration.total_seconds() <= 60*10:
return '0 */2 * * ?'
elif longest_duration.total_seconds() <= 60*60:
return '0 */6 * * ?'
else:
return 'daily'
async def analyze_frequency():
await bobsled.initialize()
tasks = [attr.asdict(t) for t in await bobsled.storage.get_tasks()]
results = await asyncio.gather(
*[bobsled.run.get_runs(task_name=t["name"], latest=4) for t in tasks]
)
recommendations = []
for task, latest_runs in zip(tasks, results):
# make recommendations for scrape tasks that have runs
if latest_runs and '-scrape' in task['name']:
if all(run.status is Status.Success for run in latest_runs):
recommendation = recommend_frequency_for_task(latest_runs)
else:
# a recent run failed, so make a note of that
recommendation = 'n/a - at least one recent task failed'
recommendations.append({
'task': task['name'],
'current_schedule': task['triggers'][0]['cron'],
'recommended': recommendation
})
changed_recommendations = []
for recommendation in recommendations:
if recommendation['recommended'] != 'daily' and 'n/a' not in recommendation['recommended']\
and recommendation['current_schedule'] != recommendation['recommended']:
changed_recommendations.append(recommendation)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(changed_recommendations)
def main():
# asyncio.run(bobsled.initialize())  # initializing here causes a threading problem
asyncio.run(analyze_frequency())
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script that recommends scrape task schedule based on recent run timings<commit_after>
|
import asyncio
import attr
import pprint
import dateutil.parser
from datetime import timedelta
from bobsled.core import bobsled
from bobsled.base import Status
def recommend_frequency_for_task(runs):
total_duration = timedelta(seconds=0)
longest_duration = timedelta(seconds=0)
for run in runs:
start = dateutil.parser.parse(run.start)
end = dateutil.parser.parse(run.end)
duration = end - start
total_duration += duration
if duration > longest_duration:
longest_duration = duration
average = total_duration / len(runs)
if longest_duration.total_seconds() <= 60*10:
return '0 */2 * * ?'
elif longest_duration.total_seconds() <= 60*60:
return '0 */6 * * ?'
else:
return 'daily'
async def analyze_frequency():
await bobsled.initialize()
tasks = [attr.asdict(t) for t in await bobsled.storage.get_tasks()]
results = await asyncio.gather(
*[bobsled.run.get_runs(task_name=t["name"], latest=4) for t in tasks]
)
recommendations = []
for task, latest_runs in zip(tasks, results):
# make recommendations for scrape tasks that have runs
if latest_runs and '-scrape' in task['name']:
if all(run.status is Status.Success for run in latest_runs):
recommendation = recommend_frequency_for_task(latest_runs)
else:
# a recent run failed, so make a note of that
recommendation = 'n/a - at least one recent task failed'
recommendations.append({
'task': task['name'],
'current_schedule': task['triggers'][0]['cron'],
'recommended': recommendation
})
changed_recommendations = []
for recommendation in recommendations:
if recommendation['recommended'] != 'daily' and 'n/a' not in recommendation['recommended']\
and recommendation['current_schedule'] != recommendation['recommended']:
changed_recommendations.append(recommendation)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(changed_recommendations)
def main():
# asyncio.run(bobsled.initialize())  # initializing here causes a threading problem
asyncio.run(analyze_frequency())
if __name__ == "__main__":
main()
|
Add script that recommends scrape task schedule based on recent run timingsimport asyncio
import attr
import pprint
import dateutil.parser
from datetime import timedelta
from bobsled.core import bobsled
from bobsled.base import Status
def recommend_frequency_for_task(runs):
total_duration = timedelta(seconds=0)
longest_duration = timedelta(seconds=0)
for run in runs:
start = dateutil.parser.parse(run.start)
end = dateutil.parser.parse(run.end)
duration = end - start
total_duration += duration
if duration > longest_duration:
longest_duration = duration
average = total_duration / len(runs)
if longest_duration.total_seconds() <= 60*10:
return '0 */2 * * ?'
elif longest_duration.total_seconds() <= 60*60:
return '0 */6 * * ?'
else:
return 'daily'
async def analyze_frequency():
await bobsled.initialize()
tasks = [attr.asdict(t) for t in await bobsled.storage.get_tasks()]
results = await asyncio.gather(
*[bobsled.run.get_runs(task_name=t["name"], latest=4) for t in tasks]
)
recommendations = []
for task, latest_runs in zip(tasks, results):
# make recommendations for scrape tasks that have runs
if latest_runs and '-scrape' in task['name']:
if all(run.status is Status.Success for run in latest_runs):
recommendation = recommend_frequency_for_task(latest_runs)
else:
# a recent run failed, so make a note of that
recommendation = 'n/a - at least one recent task failed'
recommendations.append({
'task': task['name'],
'current_schedule': task['triggers'][0]['cron'],
'recommended': recommendation
})
changed_recommendations = []
for recommendation in recommendations:
if recommendation['recommended'] != 'daily' and 'n/a' not in recommendation['recommended']\
and recommendation['current_schedule'] != recommendation['recommended']:
changed_recommendations.append(recommendation)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(changed_recommendations)
def main():
# asyncio.run(bobsled.initialize())  # initializing here causes a threading problem
asyncio.run(analyze_frequency())
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script that recommends scrape task schedule based on recent run timings<commit_after>import asyncio
import attr
import pprint
import dateutil.parser
from datetime import timedelta
from bobsled.core import bobsled
from bobsled.base import Status
def recommend_frequency_for_task(runs):
total_duration = timedelta(seconds=0)
longest_duration = timedelta(seconds=0)
for run in runs:
start = dateutil.parser.parse(run.start)
end = dateutil.parser.parse(run.end)
duration = end - start
total_duration += duration
if duration > longest_duration:
longest_duration = duration
average = total_duration / len(runs)
if longest_duration.total_seconds() <= 60*10:
return '0 */2 * * ?'
elif longest_duration.total_seconds() <= 60*60:
return '0 */6 * * ?'
else:
return 'daily'
async def analyze_frequency():
await bobsled.initialize()
tasks = [attr.asdict(t) for t in await bobsled.storage.get_tasks()]
results = await asyncio.gather(
*[bobsled.run.get_runs(task_name=t["name"], latest=4) for t in tasks]
)
recommendations = []
for task, latest_runs in zip(tasks, results):
# make recommendations for scrape tasks that have runs
if latest_runs and '-scrape' in task['name']:
if all(run.status is Status.Success for run in latest_runs):
recommendation = recommend_frequency_for_task(latest_runs)
else:
# a recent run failed, so make a note of that
recommendation = 'n/a - at least one recent task failed'
recommendations.append({
'task': task['name'],
'current_schedule': task['triggers'][0]['cron'],
'recommended': recommendation
})
changed_recommendations = []
for recommendation in recommendations:
if recommendation['recommended'] != 'daily' and 'n/a' not in recommendation['recommended']\
and recommendation['current_schedule'] != recommendation['recommended']:
changed_recommendations.append(recommendation)
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(changed_recommendations)
def main():
# asyncio.run(bobsled.initialize()) # this makes a threading problem if it's here
asyncio.run(analyze_frequency())
if __name__ == "__main__":
main()
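# A hedged sanity check (assumption): two fake runs of 5 and 8 minutes should
# land in the two-hourly bucket, since the longest stays under ten minutes.
from types import SimpleNamespace
_fake_runs = [
    SimpleNamespace(start="2020-01-01T00:00:00", end="2020-01-01T00:05:00"),
    SimpleNamespace(start="2020-01-02T00:00:00", end="2020-01-02T00:08:00"),
]
assert recommend_frequency_for_task(_fake_runs) == "0 */2 * * ?"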
|
|
6aef9ab419b09822b2255141349144ac8978e862
|
kolibri/core/content/migrations/0025_add_h5p_kind.py
|
kolibri/core/content/migrations/0025_add_h5p_kind.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-19 02:29
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("content", "0024_channelmetadata_public"),
]
operations = [
migrations.AlterField(
model_name="contentnode",
name="kind",
field=models.CharField(
blank=True,
choices=[
("topic", "Topic"),
("video", "Video"),
("audio", "Audio"),
("exercise", "Exercise"),
("document", "Document"),
("html5", "HTML5 App"),
("slideshow", "Slideshow"),
("h5p", "H5P"),
],
max_length=200,
),
),
]
|
Add migration for h5p kind.
|
Add migration for h5p kind.
|
Python
|
mit
|
mrpau/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,indirectlylit/kolibri,learningequality/kolibri,learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,indirectlylit/kolibri,learningequality/kolibri
|
Add migration for h5p kind.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-19 02:29
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("content", "0024_channelmetadata_public"),
]
operations = [
migrations.AlterField(
model_name="contentnode",
name="kind",
field=models.CharField(
blank=True,
choices=[
("topic", "Topic"),
("video", "Video"),
("audio", "Audio"),
("exercise", "Exercise"),
("document", "Document"),
("html5", "HTML5 App"),
("slideshow", "Slideshow"),
("h5p", "H5P"),
],
max_length=200,
),
),
]
|
<commit_before><commit_msg>Add migration for h5p kind.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-19 02:29
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("content", "0024_channelmetadata_public"),
]
operations = [
migrations.AlterField(
model_name="contentnode",
name="kind",
field=models.CharField(
blank=True,
choices=[
("topic", "Topic"),
("video", "Video"),
("audio", "Audio"),
("exercise", "Exercise"),
("document", "Document"),
("html5", "HTML5 App"),
("slideshow", "Slideshow"),
("h5p", "H5P"),
],
max_length=200,
),
),
]
|
Add migration for h5p kind.# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-19 02:29
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("content", "0024_channelmetadata_public"),
]
operations = [
migrations.AlterField(
model_name="contentnode",
name="kind",
field=models.CharField(
blank=True,
choices=[
("topic", "Topic"),
("video", "Video"),
("audio", "Audio"),
("exercise", "Exercise"),
("document", "Document"),
("html5", "HTML5 App"),
("slideshow", "Slideshow"),
("h5p", "H5P"),
],
max_length=200,
),
),
]
|
<commit_before><commit_msg>Add migration for h5p kind.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-12-19 02:29
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("content", "0024_channelmetadata_public"),
]
operations = [
migrations.AlterField(
model_name="contentnode",
name="kind",
field=models.CharField(
blank=True,
choices=[
("topic", "Topic"),
("video", "Video"),
("audio", "Audio"),
("exercise", "Exercise"),
("document", "Document"),
("html5", "HTML5 App"),
("slideshow", "Slideshow"),
("h5p", "H5P"),
],
max_length=200,
),
),
]
|
|
ecfadf8478b8775d8579812a7bd835f6ebb1ffd4
|
util/rclone-list-files.py
|
util/rclone-list-files.py
|
#!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure, which is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
|
Add file lister for rclone export
|
Add file lister for rclone export
|
Python
|
bsd-3-clause
|
jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar
|
Add file lister for rclone export
|
#!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure, which is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
|
<commit_before><commit_msg>Add file lister for rclone export<commit_after>
|
#!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure, which is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
|
Add file lister for rclone export#!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure, which is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
|
<commit_before><commit_msg>Add file lister for rclone export<commit_after>#!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure, which is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
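# A hedged usage sketch (assumption): the printed paths feed rclone's real
# --files-from flag; "remote:bucket" is a placeholder remote name.
#   python3 rclone-list-files.py > files.txt
#   rclone copy . remote:bucket --files-from files.txt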
|
|
499adce8b5c23d60073d4c92259e611609ee0c61
|
states/common/maven/artifacts/check_dependencies.py
|
states/common/maven/artifacts/check_dependencies.py
|
#!/usr/bin/env python
import subprocess as sub
import yaml
import re
distrib_pom_path = '/home/uvsmtid/Works/maritime-singapore.git/clearsea-distribution/pom.xml'
# Resolve (download) all dependencies locally so that next command
# can work offline.
sub.check_call(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:resolve',
],
)
# Get list of all dependencies.
p = sub.Popen(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:list',
],
stdout = sub.PIPE,
)
# Select lines with dependency items.
artifact_regex = re.compile(')
for line in p.stdout:
|
Add initial draft script to analyse Maven deps
|
Add initial draft script to analyse Maven deps
|
Python
|
apache-2.0
|
uvsmtid/common-salt-states,uvsmtid/common-salt-states,uvsmtid/common-salt-states,uvsmtid/common-salt-states
|
Add initial draft script to analyse Maven deps
|
#!/usr/bin/env python
import subprocess as sub
import yaml
import re
distrib_pom_path = '/home/uvsmtid/Works/maritime-singapore.git/clearsea-distribution/pom.xml'
# Resolve (download) all dependencies locally so that next command
# can work offline.
sub.check_call(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:resolve',
],
)
# Get list of all dependencies.
p = sub.Popen(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:list',
],
stdout = sub.PIPE,
)
# Select lines with dependency items.
artifact_regex = re.compile(')
for line in p.stdout:
|
<commit_before><commit_msg>Add initial draft script to analyse Maven deps<commit_after>
|
#!/usr/bin/env python
import subprocess as sub
import yaml
import re
distrib_pom_path = '/home/uvsmtid/Works/maritime-singapore.git/clearsea-distribution/pom.xml'
# Resolve (download) all dependencies locally so that next command
# can work offline.
sub.check_call(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:resolve',
],
)
# Get list of all dependencies.
p = sub.Popen(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:list',
],
stdout = sub.PIPE,
)
# Select lines with dependency items.
artifact_regex = re.compile(')
for line in p.stdout:
|
Add initial draft script to analyse Maven deps#!/usr/bin/env python
import subprocess as sub
import yaml
import re
distrib_pom_path = '/home/uvsmtid/Works/maritime-singapore.git/clearsea-distribution/pom.xml'
# Resolve (download) all dependencies locally so that next command
# can work offline.
sub.check_call(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:resolve',
],
)
# Get list of all dependencies.
p = sub.Popen(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:list',
],
stdout = sub.PIPE,
)
# Select lines with dependency items.
artifact_regex = re.compile(')
for line in p.stdout:
|
<commit_before><commit_msg>Add initial draft script to analyse Maven deps<commit_after>#!/usr/bin/env python
import subprocess as sub
import yaml
import re
distrib_pom_path = '/home/uvsmtid/Works/maritime-singapore.git/clearsea-distribution/pom.xml'
# Resolve (download) all dependencies locally so that next command
# can work offline.
sub.check_call(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:resolve',
],
)
# Get list of all dependencies.
p = sub.Popen(
[
'mvn',
'-f',
distrib_pom_path,
'dependency:list',
],
stdout = sub.PIPE,
)
# Select lines with dependency items.
artifact_regex = re.compile(')
for line in p.stdout:
|
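A hedged completion sketch (assumption): mvn dependency:list typically prints
lines like "[INFO]    group:artifact:packaging:version:scope", so a regex along
these lines would recover the coordinates the truncated loop above is after.

import re
line = "[INFO]    org.slf4j:slf4j-api:jar:1.7.30:compile"
artifact_regex = re.compile(r"\[INFO\]\s+([\w.\-]+):([\w.\-]+):(\w+):([\w.\-]+):(\w+)")
match = artifact_regex.search(line)
print(match.groups())  # ('org.slf4j', 'slf4j-api', 'jar', '1.7.30', 'compile')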
|
6fd75772efac321517a1d8c01addfa5cbbf7caf0
|
tests/db/user_test.py
|
tests/db/user_test.py
|
from okcupyd.db import user
def test_have_messaged_before(T):
message_thread_model = T.factory.message_thread()
assert user.have_messaged_by_username(
message_thread_model.initiator.handle,
message_thread_model.respondent.handle
)
assert user.have_messaged_by_username(
message_thread_model.respondent.handle,
message_thread_model.initiator.handle
)
assert not user.have_messaged_by_username('a', 'b')
assert not user.have_messaged_by_username(
message_thread_model.respondent.handle, 'a'
)
T.factory.user('b')
assert not user.have_messaged_by_username(
'b', message_thread_model.initiator.handle
)
|
Add test file for user functions.
|
Add test file for user functions.
|
Python
|
mit
|
IvanMalison/okcupyd,okuser/okcupyd,IvanMalison/okcupyd,okuser/okcupyd
|
Add test file for user functions.
|
from okcupyd.db import user
def test_have_messaged_before(T):
message_thread_model = T.factory.message_thread()
assert user.have_messaged_by_username(
message_thread_model.initiator.handle,
message_thread_model.respondent.handle
)
assert user.have_messaged_by_username(
message_thread_model.respondent.handle,
message_thread_model.initiator.handle
)
assert not user.have_messaged_by_username('a', 'b')
assert not user.have_messaged_by_username(
message_thread_model.respondent.handle, 'a'
)
T.factory.user('b')
assert not user.have_messaged_by_username(
'b', message_thread_model.initiator.handle
)
|
<commit_before><commit_msg>Add test file for user functions.<commit_after>
|
from okcupyd.db import user
def test_have_messaged_before(T):
message_thread_model = T.factory.message_thread()
assert user.have_messaged_by_username(
message_thread_model.initiator.handle,
message_thread_model.respondent.handle
)
assert user.have_messaged_by_username(
message_thread_model.respondent.handle,
message_thread_model.initiator.handle
)
assert not user.have_messaged_by_username('a', 'b')
assert not user.have_messaged_by_username(
message_thread_model.respondent.handle, 'a'
)
T.factory.user('b')
assert not user.have_messaged_by_username(
'b', message_thread_model.initiator.handle
)
|
Add test file for user functions.from okcupyd.db import user
def test_have_messaged_before(T):
message_thread_model = T.factory.message_thread()
assert user.have_messaged_by_username(
message_thread_model.initiator.handle,
message_thread_model.respondent.handle
)
assert user.have_messaged_by_username(
message_thread_model.respondent.handle,
message_thread_model.initiator.handle
)
assert not user.have_messaged_by_username('a', 'b')
assert not user.have_messaged_by_username(
message_thread_model.respondent.handle, 'a'
)
T.factory.user('b')
assert not user.have_messaged_by_username(
'b', message_thread_model.initiator.handle
)
|
<commit_before><commit_msg>Add test file for user functions.<commit_after>from okcupyd.db import user
def test_have_messaged_before(T):
message_thread_model = T.factory.message_thread()
assert user.have_messaged_by_username(
message_thread_model.initiator.handle,
message_thread_model.respondent.handle
)
assert user.have_messaged_by_username(
message_thread_model.respondent.handle,
message_thread_model.initiator.handle
)
assert not user.have_messaged_by_username('a', 'b')
assert not user.have_messaged_by_username(
message_thread_model.respondent.handle, 'a'
)
T.factory.user('b')
assert not user.have_messaged_by_username(
'b', message_thread_model.initiator.handle
)
|
|
f8a0aa92c8e19bc11f8a609733644afe0efed5c8
|
decompose_test.py
|
decompose_test.py
|
from util.decompose_graph import decompose_graph
from core.himesis_utils import expand_graph, set_do_pickle, set_compression
set_do_pickle(True)
set_compression(6)
file_name = "226482067288742734644994685633991185819"
graph = expand_graph(file_name)
print(graph.name)
from core.himesis_utils import load_directory
contracts = load_directory("mbeddr2C_MM/Contracts/")
atomic_contracts = [
'AssignmentInstance'
]
if_then_contracts = []
prop_if_then_contracts = []
from core.himesis_utils import graph_to_dot, load_directory
from util.test_script_utils import select_rules, get_sub_and_super_classes,\
load_transformation, changePropertyProverMetamodel, set_supertypes, load_contracts
from util.slicer import Slicer
from util.parser import load_parser
inputMM = "./mbeddr2C_MM/ecore_metamodels/Module.ecore"
outputMM = "./mbeddr2C_MM/ecore_metamodels/C.ecore"
subclasses_dict, superclasses_dict = get_sub_and_super_classes(inputMM, outputMM)
atomic_contracts, if_then_contracts = load_contracts(contracts, superclasses_dict,
atomic_contracts, if_then_contracts,
prop_if_then_contracts,
False)
contract = atomic_contracts[0][1]
print(contract)
print(contract.has_pivots())
#graph_to_dot(graph.name, graph, force_trace_links = True)
import time
print("Starting to check")
start_time = time.time()
result = contract.check(graph)
print(result)
print("Finished in " + str(time.time() - start_time) + " seconds")
#decompose_graph(graph)
|
Update test script to do match testing.
|
Update test script to do match testing.
|
Python
|
mit
|
levilucio/SyVOLT,levilucio/SyVOLT
|
Update test script to do match testing.
|
from util.decompose_graph import decompose_graph
from core.himesis_utils import expand_graph, set_do_pickle, set_compression
set_do_pickle(True)
set_compression(6)
file_name = "226482067288742734644994685633991185819"
graph = expand_graph(file_name)
print(graph.name)
from core.himesis_utils import load_directory
contracts = load_directory("mbeddr2C_MM/Contracts/")
atomic_contracts = [
'AssignmentInstance'
]
if_then_contracts = []
prop_if_then_contracts = []
from core.himesis_utils import graph_to_dot, load_directory
from util.test_script_utils import select_rules, get_sub_and_super_classes,\
load_transformation, changePropertyProverMetamodel, set_supertypes, load_contracts
from util.slicer import Slicer
from util.parser import load_parser
inputMM = "./mbeddr2C_MM/ecore_metamodels/Module.ecore"
outputMM = "./mbeddr2C_MM/ecore_metamodels/C.ecore"
subclasses_dict, superclasses_dict = get_sub_and_super_classes(inputMM, outputMM)
atomic_contracts, if_then_contracts = load_contracts(contracts, superclasses_dict,
atomic_contracts, if_then_contracts,
prop_if_then_contracts,
False)
contract = atomic_contracts[0][1]
print(contract)
print(contract.has_pivots())
#graph_to_dot(graph.name, graph, force_trace_links = True)
import time
print("Starting to check")
start_time = time.time()
result = contract.check(graph)
print(result)
print("Finished in " + str(time.time() - start_time) + " seconds")
#decompose_graph(graph)
|
<commit_before><commit_msg>Update test script to do match testing.<commit_after>
|
from util.decompose_graph import decompose_graph
from core.himesis_utils import expand_graph, set_do_pickle, set_compression
set_do_pickle(True)
set_compression(6)
file_name = "226482067288742734644994685633991185819"
graph = expand_graph(file_name)
print(graph.name)
from core.himesis_utils import load_directory
contracts = load_directory("mbeddr2C_MM/Contracts/")
atomic_contracts = [
'AssignmentInstance'
]
if_then_contracts = []
prop_if_then_contracts = []
from core.himesis_utils import graph_to_dot, load_directory
from util.test_script_utils import select_rules, get_sub_and_super_classes,\
load_transformation, changePropertyProverMetamodel, set_supertypes, load_contracts
from util.slicer import Slicer
from util.parser import load_parser
inputMM = "./mbeddr2C_MM/ecore_metamodels/Module.ecore"
outputMM = "./mbeddr2C_MM/ecore_metamodels/C.ecore"
subclasses_dict, superclasses_dict = get_sub_and_super_classes(inputMM, outputMM)
atomic_contracts, if_then_contracts = load_contracts(contracts, superclasses_dict,
atomic_contracts, if_then_contracts,
prop_if_then_contracts,
False)
contract = atomic_contracts[0][1]
print(contract)
print(contract.has_pivots())
#graph_to_dot(graph.name, graph, force_trace_links = True)
import time
print("Starting to check")
start_time = time.time()
result = contract.check(graph)
print(result)
print("Finished in " + str(time.time() - start_time) + " seconds")
#decompose_graph(graph)
|
Update test script to do match testing.from util.decompose_graph import decompose_graph
from core.himesis_utils import expand_graph, set_do_pickle, set_compression
set_do_pickle(True)
set_compression(6)
file_name = "226482067288742734644994685633991185819"
graph = expand_graph(file_name)
print(graph.name)
from core.himesis_utils import load_directory
contracts = load_directory("mbeddr2C_MM/Contracts/")
atomic_contracts = [
'AssignmentInstance'
]
if_then_contracts = []
prop_if_then_contracts = []
from core.himesis_utils import graph_to_dot, load_directory
from util.test_script_utils import select_rules, get_sub_and_super_classes,\
load_transformation, changePropertyProverMetamodel, set_supertypes, load_contracts
from util.slicer import Slicer
from util.parser import load_parser
inputMM = "./mbeddr2C_MM/ecore_metamodels/Module.ecore"
outputMM = "./mbeddr2C_MM/ecore_metamodels/C.ecore"
subclasses_dict, superclasses_dict = get_sub_and_super_classes(inputMM, outputMM)
atomic_contracts, if_then_contracts = load_contracts(contracts, superclasses_dict,
atomic_contracts, if_then_contracts,
prop_if_then_contracts,
False)
contract = atomic_contracts[0][1]
print(contract)
print(contract.has_pivots())
#graph_to_dot(graph.name, graph, force_trace_links = True)
import time
print("Starting to check")
start_time = time.time()
result = contract.check(graph)
print(result)
print("Finished in " + str(time.time() - start_time) + " seconds")
#decompose_graph(graph)
|
<commit_before><commit_msg>Update test script to do match testing.<commit_after>from util.decompose_graph import decompose_graph
from core.himesis_utils import expand_graph, set_do_pickle, set_compression
set_do_pickle(True)
set_compression(6)
file_name = "226482067288742734644994685633991185819"
graph = expand_graph(file_name)
print(graph.name)
from core.himesis_utils import load_directory
contracts = load_directory("mbeddr2C_MM/Contracts/")
atomic_contracts = [
'AssignmentInstance'
]
if_then_contracts = []
prop_if_then_contracts = []
from core.himesis_utils import graph_to_dot, load_directory
from util.test_script_utils import select_rules, get_sub_and_super_classes,\
load_transformation, changePropertyProverMetamodel, set_supertypes, load_contracts
from util.slicer import Slicer
from util.parser import load_parser
inputMM = "./mbeddr2C_MM/ecore_metamodels/Module.ecore"
outputMM = "./mbeddr2C_MM/ecore_metamodels/C.ecore"
subclasses_dict, superclasses_dict = get_sub_and_super_classes(inputMM, outputMM)
atomic_contracts, if_then_contracts = load_contracts(contracts, superclasses_dict,
atomic_contracts, if_then_contracts,
prop_if_then_contracts,
False)
contract = atomic_contracts[0][1]
print(contract)
print(contract.has_pivots())
#graph_to_dot(graph.name, graph, force_trace_links = True)
import time
print("Starting to check")
start_time = time.time()
result = contract.check(graph)
print(result)
print("Finished in " + str(time.time() - start_time) + " seconds")
#decompose_graph(graph)
|
|
7e96013f21bbb5003b30da1e04833dcf58650602
|
freenoted/tasks/tornado_thrift.py
|
freenoted/tasks/tornado_thrift.py
|
from __future__ import absolute_import
import tornado.web
from thrift.transport.TTransport import TMemoryBuffer
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
class TornadoThriftHandler(tornado.web.RequestHandler):
def initialize(self, processor):
self.processor = processor
def post(self):
iprot = TBinaryProtocol(TMemoryBuffer(self.request.body))
oprot = TBinaryProtocol(TMemoryBuffer())
self.processor.process(iprot, oprot)
self.set_header('Content-Type', 'application/x-thrift')
self.write(oprot.trans.getvalue())
|
Implement a ThriftHandler for tornado
|
Implement a ThriftHandler for tornado
This Handler requires a processor=... kwarg (e.g., self.service) for the
third value in the handler description (e.g.,:
('/thrift', TornadoThriftHandler, dict(processor=self.service))
).
|
Python
|
bsd-3-clause
|
fmoo/sparts,bboozzoo/sparts,facebook/sparts,pshuff/sparts,fmoo/sparts,djipko/sparts,bboozzoo/sparts,djipko/sparts,facebook/sparts,pshuff/sparts
|
Implement a ThriftHandler for tornado
This Handler requires a processor=... kwarg (e.g., self.service) for the
third value in the handler description (e.g.,:
('/thrift', TornadoThriftHandler, dict(processor=self.service))
).
|
from __future__ import absolute_import
import tornado.web
from thrift.transport.TTransport import TMemoryBuffer
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
class TornadoThriftHandler(tornado.web.RequestHandler):
def initialize(self, processor):
self.processor = processor
def post(self):
iprot = TBinaryProtocol(TMemoryBuffer(self.request.body))
oprot = TBinaryProtocol(TMemoryBuffer())
self.processor.process(iprot, oprot)
self.set_header('Content-Type', 'application/x-thrift')
self.write(oprot.trans.getvalue())
|
<commit_before><commit_msg>Implement a ThriftHandler for tornado
This Handler requires a processor=... kwarg (e.g., self.service) for the
third value in the handler description (e.g.,:
('/thrift', TornadoThriftHandler, dict(processor=self.service))
).<commit_after>
|
from __future__ import absolute_import
import tornado.web
from thrift.transport.TTransport import TMemoryBuffer
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
class TornadoThriftHandler(tornado.web.RequestHandler):
def initialize(self, processor):
self.processor = processor
def post(self):
iprot = TBinaryProtocol(TMemoryBuffer(self.request.body))
oprot = TBinaryProtocol(TMemoryBuffer())
self.processor.process(iprot, oprot)
self.set_header('Content-Type', 'application/x-thrift')
self.write(oprot.trans.getvalue())
|
Implement a ThriftHandler for tornado
This Handler requires a processor=... kwarg (e.g., self.service) for the
third value in the handler description (e.g.,:
('/thrift', TornadoThriftHandler, dict(processor=self.service))
).from __future__ import absolute_import
import tornado.web
from thrift.transport.TTransport import TMemoryBuffer
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
class TornadoThriftHandler(tornado.web.RequestHandler):
def initialize(self, processor):
self.processor = processor
def post(self):
iprot = TBinaryProtocol(TMemoryBuffer(self.request.body))
oprot = TBinaryProtocol(TMemoryBuffer())
self.processor.process(iprot, oprot)
self.set_header('Content-Type', 'application/x-thrift')
self.write(oprot.trans.getvalue())
|
<commit_before><commit_msg>Implement a ThriftHandler for tornado
This Handler requires a processor=... kwarg (e.g., self.service) for the
third value in the handler description (e.g.,:
('/thrift', TornadoThriftHandler, dict(processor=self.service))
).<commit_after>from __future__ import absolute_import
import tornado.web
from thrift.transport.TTransport import TMemoryBuffer
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
class TornadoThriftHandler(tornado.web.RequestHandler):
def initialize(self, processor):
self.processor = processor
def post(self):
iprot = TBinaryProtocol(TMemoryBuffer(self.request.body))
oprot = TBinaryProtocol(TMemoryBuffer())
self.processor.process(iprot, oprot)
self.set_header('Content-Type', 'application/x-thrift')
self.write(oprot.trans.getvalue())
|
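For context, a minimal wiring sketch for the handler above. The application routing, port, and the processor object are illustrative assumptions, not part of the original commit:
import tornado.ioloop
import tornado.web
# `processor` is assumed to be a generated thrift Processor instance.
application = tornado.web.Application([
    ('/thrift', TornadoThriftHandler, dict(processor=processor)),
])
application.listen(8080)
tornado.ioloop.IOLoop.instance().start()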
|
d60c1f9a6e56472611a96779462b42e8505e7905
|
python/pdf_to_img.py
|
python/pdf_to_img.py
|
import requests
import json
# Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint - https://pixlab.io/cmd?id=pdftoimg
req = requests.get('https://api.pixlab.io/pdftoimg',params={
'src':'https://www.getharvest.com/downloads/Invoice_Template.pdf',
'export': 'jpeg',
'key':'My_PixLab_Key'
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Link to the image output (Converted PDF page): "+ reply['link'])
|
Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint
|
Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint
|
Python
|
bsd-2-clause
|
symisc/pixlab,symisc/pixlab,symisc/pixlab
|
Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint
|
import requests
import json
# Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint - https://pixlab.io/cmd?id=pdftoimg
req = requests.get('https://api.pixlab.io/pdftoimg',params={
'src':'https://www.getharvest.com/downloads/Invoice_Template.pdf',
'export': 'jpeg',
'key':'My_PixLab_Key'
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Link to the image output (Converted PDF page): "+ reply['link'])
|
<commit_before><commit_msg>Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint<commit_after>
|
import requests
import json
# Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint - https://pixlab.io/cmd?id=pdftoimg
req = requests.get('https://api.pixlab.io/pdftoimg',params={
'src':'https://www.getharvest.com/downloads/Invoice_Template.pdf',
'export': 'jpeg',
'key':'My_PixLab_Key'
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Link to the image output (Converted PDF page): "+ reply['link'])
|
Convert a PDF document to JPEG/PNG image via /pdftoimg endpointimport requests
import json
# Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint - https://pixlab.io/cmd?id=pdftoimg
req = requests.get('https://api.pixlab.io/pdftoimg',params={
'src':'https://www.getharvest.com/downloads/Invoice_Template.pdf',
'export': 'jpeg',
'key':'My_PixLab_Key'
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Link to the image output (Converted PDF page): "+ reply['link'])
|
<commit_before><commit_msg>Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint<commit_after>import requests
import json
# Convert a PDF document to JPEG/PNG image via /pdftoimg endpoint - https://pixlab.io/cmd?id=pdftoimg
req = requests.get('https://api.pixlab.io/pdftoimg',params={
'src':'https://www.getharvest.com/downloads/Invoice_Template.pdf',
'export': 'jpeg',
'key':'My_PixLab_Key'
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Link to the image output (Converted PDF page): "+ reply['link'])
|
|
b47369d43a0a85ac2bc32bfa77c6a4d9074ce700
|
test/test_retrieve_dns.py
|
test/test_retrieve_dns.py
|
import logging
import os
import tempfile
import unittest
import mock
import bin.retrieve_dns
logging.basicConfig(level=logging.INFO)
class RetrieveDnsTestCase(unittest.TestCase):
def setUp(self):
# Mock out logging
mock.patch('bin.retrieve_dns.set_up_logging', autospec=True).start()
# Mock out config
mock_config = mock.patch('bin.retrieve_dns.get_config', autospec=True).start()
# Mock out retrieving xml
self.mock_xml = mock.patch('bin.retrieve_dns.get_xml', autospec=True).start()
# Set up temp files
self.files = {}
for item in ('dn', 'extra', 'ban'):
self.files[item] = dict(zip(('handle', 'path'), tempfile.mkstemp()))
os.write(self.files[item]['handle'], '/wobble')
for item in self.files.values():
os.close(item['handle'])
# Set up config using temp files
c = bin.retrieve_dns.Configuration()
c.dn_file = self.files['dn']['path']
c.extra_dns = self.files['extra']['path']
c.banned_dns = self.files['ban']['path']
mock_config.return_value = c
def test_basics(self):
self.mock_xml.return_value = "<HOSTDN>/wibble</HOSTDN>"
bin.retrieve_dns.runprocess("fakefile", "fakefile")
dns = open(self.files['dn']['path'])
self.assertEqual(dns.read(), '/wibble\n')
dns.close()
def tearDown(self):
# Delete temp files
for item in self.files.values():
os.remove(item['path'])
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
Add basic test case for retrieve_dns module
|
Add basic test case for retrieve_dns module
|
Python
|
apache-2.0
|
apel/apel,stfc/apel,tofu-rocketry/apel,apel/apel,tofu-rocketry/apel,stfc/apel
|
Add basic test case for retrieve_dns module
|
import logging
import os
import tempfile
import unittest
import mock
import bin.retrieve_dns
logging.basicConfig(level=logging.INFO)
class RetrieveDnsTestCase(unittest.TestCase):
def setUp(self):
# Mock out logging
mock.patch('bin.retrieve_dns.set_up_logging', autospec=True).start()
# Mock out config
mock_config = mock.patch('bin.retrieve_dns.get_config', autospec=True).start()
# Mock out retrieving xml
self.mock_xml = mock.patch('bin.retrieve_dns.get_xml', autospec=True).start()
# Set up temp files
self.files = {}
for item in ('dn', 'extra', 'ban'):
self.files[item] = dict(zip(('handle', 'path'), tempfile.mkstemp()))
os.write(self.files[item]['handle'], '/wobble')
for item in self.files.values():
os.close(item['handle'])
# Set up config using temp files
c = bin.retrieve_dns.Configuration()
c.dn_file = self.files['dn']['path']
c.extra_dns = self.files['extra']['path']
c.banned_dns = self.files['ban']['path']
mock_config.return_value = c
def test_basics(self):
self.mock_xml.return_value = "<HOSTDN>/wibble</HOSTDN>"
bin.retrieve_dns.runprocess("fakefile", "fakefile")
dns = open(self.files['dn']['path'])
self.assertEqual(dns.read(), '/wibble\n')
dns.close()
def tearDown(self):
# Delete temp files
for item in self.files.values():
os.remove(item['path'])
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add basic test case for retrieve_dns module<commit_after>
|
import logging
import os
import tempfile
import unittest
import mock
import bin.retrieve_dns
logging.basicConfig(level=logging.INFO)
class RetrieveDnsTestCase(unittest.TestCase):
def setUp(self):
# Mock out logging
mock.patch('bin.retrieve_dns.set_up_logging', autospec=True).start()
# Mock out config
mock_config = mock.patch('bin.retrieve_dns.get_config', autospec=True).start()
# Mock out retrieving xml
self.mock_xml = mock.patch('bin.retrieve_dns.get_xml', autospec=True).start()
# Set up temp files
self.files = {}
for item in ('dn', 'extra', 'ban'):
self.files[item] = dict(zip(('handle', 'path'), tempfile.mkstemp()))
os.write(self.files[item]['handle'], '/wobble')
for item in self.files.values():
os.close(item['handle'])
# Set up config using temp files
c = bin.retrieve_dns.Configuration()
c.dn_file = self.files['dn']['path']
c.extra_dns = self.files['extra']['path']
c.banned_dns = self.files['ban']['path']
mock_config.return_value = c
def test_basics(self):
self.mock_xml.return_value = "<HOSTDN>/wibble</HOSTDN>"
bin.retrieve_dns.runprocess("fakefile", "fakefile")
dns = open(self.files['dn']['path'])
self.assertEqual(dns.read(), '/wibble\n')
dns.close()
def tearDown(self):
# Delete temp files
for item in self.files.values():
os.remove(item['path'])
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
Add basic test case for retrieve_dns moduleimport logging
import os
import tempfile
import unittest
import mock
import bin.retrieve_dns
logging.basicConfig(level=logging.INFO)
class RetrieveDnsTestCase(unittest.TestCase):
def setUp(self):
# Mock out logging
mock.patch('bin.retrieve_dns.set_up_logging', autospec=True).start()
# Mock out config
mock_config = mock.patch('bin.retrieve_dns.get_config', autospec=True).start()
# Mock out retrieving xml
self.mock_xml = mock.patch('bin.retrieve_dns.get_xml', autospec=True).start()
# Set up temp files
self.files = {}
for item in ('dn', 'extra', 'ban'):
self.files[item] = dict(zip(('handle', 'path'), tempfile.mkstemp()))
os.write(self.files[item]['handle'], '/wobble')
for item in self.files.values():
os.close(item['handle'])
# Set up config using temp files
c = bin.retrieve_dns.Configuration()
c.dn_file = self.files['dn']['path']
c.extra_dns = self.files['extra']['path']
c.banned_dns = self.files['ban']['path']
mock_config.return_value = c
def test_basics(self):
self.mock_xml.return_value = "<HOSTDN>/wibble</HOSTDN>"
bin.retrieve_dns.runprocess("fakefile", "fakefile")
dns = open(self.files['dn']['path'])
self.assertEqual(dns.read(), '/wibble\n')
dns.close()
def tearDown(self):
# Delete temp files
for item in self.files.values():
os.remove(item['path'])
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add basic test case for retrieve_dns module<commit_after>import logging
import os
import tempfile
import unittest
import mock
import bin.retrieve_dns
logging.basicConfig(level=logging.INFO)
class RetrieveDnsTestCase(unittest.TestCase):
def setUp(self):
# Mock out logging
mock.patch('bin.retrieve_dns.set_up_logging', autospec=True).start()
# Mock out config
mock_config = mock.patch('bin.retrieve_dns.get_config', autospec=True).start()
# Mock out retrieving xml
self.mock_xml = mock.patch('bin.retrieve_dns.get_xml', autospec=True).start()
# Set up temp files
self.files = {}
for item in ('dn', 'extra', 'ban'):
self.files[item] = dict(zip(('handle', 'path'), tempfile.mkstemp()))
os.write(self.files[item]['handle'], '/wobble')
for item in self.files.values():
os.close(item['handle'])
# Set up config using temp files
c = bin.retrieve_dns.Configuration()
c.dn_file = self.files['dn']['path']
c.extra_dns = self.files['extra']['path']
c.banned_dns = self.files['ban']['path']
mock_config.return_value = c
def test_basics(self):
self.mock_xml.return_value = "<HOSTDN>/wibble</HOSTDN>"
bin.retrieve_dns.runprocess("fakefile", "fakefile")
dns = open(self.files['dn']['path'])
self.assertEqual(dns.read(), '/wibble\n')
dns.close()
def tearDown(self):
# Delete temp files
for item in self.files.values():
os.remove(item['path'])
mock.patch.stopall()
if __name__ == '__main__':
unittest.main()
|
|
9dab373023fa6b7767cd7555a533161752205eda
|
scripts/0-weighted-affine.py
|
scripts/0-weighted-affine.py
|
#!/usr/bin/python
import sys
sys.path.append('../lib')
import transformations
v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
#weights = [1.0, 1.0, 1.0, 1.0]
weights = [0.1, 0.01, 0.1, 0.2]
print "original"
print transformations.affine_matrix_from_points(v0, v1, shear=False)
print "weighted"
print transformations.affine_matrix_from_points_weighted(v0, v1, weights, shear=False)
|
Test a weighted affine solver.
|
Test a weighted affine solver.
Former-commit-id: b8876ed995f7c2ec029697ccd815957bc4a6cc93
|
Python
|
mit
|
UASLab/ImageAnalysis
|
Test a weighted affine solver.
Former-commit-id: b8876ed995f7c2ec029697ccd815957bc4a6cc93
|
#!/usr/bin/python
import sys
sys.path.append('../lib')
import transformations
v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
#weights = [1.0, 1.0, 1.0, 1.0]
weights = [0.1, 0.01, 0.1, 0.2]
print "original"
print transformations.affine_matrix_from_points(v0, v1, shear=False)
print "weighted"
print transformations.affine_matrix_from_points_weighted(v0, v1, weights, shear=False)
|
<commit_before><commit_msg>Test a weighted affine solver.
Former-commit-id: b8876ed995f7c2ec029697ccd815957bc4a6cc93<commit_after>
|
#!/usr/bin/python
import sys
sys.path.append('../lib')
import transformations
v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
#weights = [1.0, 1.0, 1.0, 1.0]
weights = [0.1, 0.01, 0.1, 0.2]
print "original"
print transformations.affine_matrix_from_points(v0, v1, shear=False)
print "weighted"
print transformations.affine_matrix_from_points_weighted(v0, v1, weights, shear=False)
|
Test a weighted affine solver.
Former-commit-id: b8876ed995f7c2ec029697ccd815957bc4a6cc93#!/usr/bin/python
import sys
sys.path.append('../lib')
import transformations
v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
#weights = [1.0, 1.0, 1.0, 1.0]
weights = [0.1, 0.01, 0.1, 0.2]
print "original"
print transformations.affine_matrix_from_points(v0, v1, shear=False)
print "weighted"
print transformations.affine_matrix_from_points_weighted(v0, v1, weights, shear=False)
|
<commit_before><commit_msg>Test a weighted affine solver.
Former-commit-id: b8876ed995f7c2ec029697ccd815957bc4a6cc93<commit_after>#!/usr/bin/python
import sys
sys.path.append('../lib')
import transformations
v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
#weights = [1.0, 1.0, 1.0, 1.0]
weights = [0.1, 0.01, 0.1, 0.2]
print "original"
print transformations.affine_matrix_from_points(v0, v1, shear=False)
print "weighted"
print transformations.affine_matrix_from_points_weighted(v0, v1, weights, shear=False)
|
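For reference, a weighted affine fit of this kind presumably minimizes the weighted least-squares objective below; this is an assumption about transformations.affine_matrix_from_points_weighted, whose implementation is not shown here:
\min_{A} \sum_{i} w_i \, \lVert A\,\tilde{x}_i - y_i \rVert^{2}
where x_i, y_i are corresponding columns of v0 and v1 (with x in homogeneous form), so points with larger weights (0.2 versus 0.01 above) pull the recovered transform more strongly toward their correspondence.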
|
260e0ef2bc37750dccea47d30110221c272e757a
|
run_all_corpora.py
|
run_all_corpora.py
|
import os
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("corpusdir", help = "Path to the directory containing corpus directories")
parser.add_argument("script", help = "name of the script to be run")
args = parser.parse_args()
## lists of corpora to skip
## and failed to run
skipped = []
failed = []
## first check that the script exists
assert os.path.isfile(args.script), "{} should be a script that exists".format(args.script)
## loop through files in the directory
for corpus in os.listdir(args.corpusdir):
## check if the file is actually a directory since that is the expected format for the
## analysis scripts
if os.path.isdir(os.path.join(args.corpusdir, corpus)):  # join with the parent dir so the check works from any cwd
if corpus in skipped:
continue
try:
print("Processing {}".format(corpus))
## first reset the corpus
subprocess.call(['python', 'reset_database.py', corpus])
## run the script on the corpus
subprocess.call(['python', args.script, corpus, "-s"])
except Exception:
failed.append(corpus)
continue
print("Complete!")
print("Following corpora were not run: {}" failed)
|
Add script for automating analysis for all corpora
|
Add script for automating analysis for all corpora
|
Python
|
mit
|
MontrealCorpusTools/SPADE,MontrealCorpusTools/SPADE
|
Add script for automating analysis for all corpora
|
import os
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("corpusdir", help = "Path to the directory containing corpus directories")
parser.add_argument("script", help = "name of the script to be run")
args = parser.parse_args()
## lists of corpora to skip
## and failed to run
skipped = []
failed = []
## first check that the script exists
assert os.path.isfile(args.script), "{} should be a script that exists".format(args.script)
## loop through files in the directory
for corpus in os.listdir(args.corpusdir):
## check if the file is actually a directory since that is the expected format for the
## analysis scripts
if os.path.isdir(os.path.join(args.corpusdir, corpus)):  # join with the parent dir so the check works from any cwd
if corpus in skipped:
continue
try:
print("Processing {}".format(corpus))
## first reset the corpus
subprocess.call(['python', 'reset_database.py', corpus])
## run the script on the corpus
subprocess.call(['python', args.script, corpus, "-s"])
except Exception:
failed.append(corpus)
continue
print("Complete!")
print("Following corpora were not run: {}" failed)
|
<commit_before><commit_msg>Add script for automating analysis for all corpora<commit_after>
|
import os
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("corpusdir", help = "Path to the directory containing corpus directories")
parser.add_argument("script", help = "name of the script to be run")
args = parser.parse_args()
## lists of corpora to skip
## and failed to run
skipped = []
failed = []
## first check that the script exists
assert os.path.isfile(args.script), "{} should be a script that exists".format(args.script)
## loop through files in the directory
for corpus in os.listdir(args.corpusdir):
## check if the file is actually a directory since that is the expected format for the
## analysis scripts
if os.path.isdir(os.path.join(args.corpusdir, corpus)):  # join with the parent dir so the check works from any cwd
if corpus in skipped:
continue
try:
print("Processing {}".format(corpus))
## first reset the corpus
subprocess.call(['python', 'reset_database.py', corpus])
## run the script on the corpus
subprocess.call(['python', args.script, corpus, "-s"])
except Exception:
failed.append(corpus)
continue
print("Complete!")
print("Following corpora were not run: {}" failed)
|
Add script for automating analysis for all corporaimport os
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("corpusdir", help = "Path to the directory containing corpus directories")
parser.add_argument("script", help = "name of the script to be run")
args = parser.parse_args()
## lists of corpora to skip
## and failed to run
skipped = []
failed = []
## first check that the script exists
assert os.path.isfile(args.script), "{} should be a script that exists".format(args.script)
## loop through files in the directory
for corpus in os.listdir(args.corpusdir):
## check if the file is actually a directory since that is the expected format for the
## analysis scripts
if os.path.isdir(os.path.join(args.corpusdir, corpus)):  # join with the parent dir so the check works from any cwd
if corpus in skipped:
continue
try:
print("Processing {}".format(corpus))
## first reset the corpus
subprocess.call(['python', 'reset_database.py', corpus])
## run the script on the corpus
subprocess.call(['python', args.script, corpus, "-s"])
except Exception:
failed.append(corpus)
continue
print("Complete!")
print("Following corpora were not run: {}" failed)
|
<commit_before><commit_msg>Add script for automating analysis for all corpora<commit_after>import os
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("corpusdir", help = "Path to the directory containing corpus directories")
parser.add_argument("script", help = "name of the script to be run")
args = parser.parse_args()
## lists of corpora to skip
## and failed to run
skipped = []
failed = []
## first check that the script exists
assert os.path.isfile(args.script), "{} should be a script that exists".format(args.script)
## loop through files in the directory
for corpus in os.listdir(args.corpusdir):
## check if the file is actually a directory since that is the expected format for the
## analysis scripts
if os.path.isdir(os.path.join(args.corpusdir, corpus)):  # join with the parent dir so the check works from any cwd
if corpus in skipped:
continue
try:
print("Processing {}".format(corpus))
## first reset the corpus
subprocess.call(['python', 'reset_database.py', corpus])
## run the script on the corpus
subprocess.call(['python', args.script, corpus, "-s"])
except Exception:
failed.append(corpus)
continue
print("Complete!")
print("Following corpora were not run: {}" failed)
|
|
d17c14df00c31af49080ff2f9fea8597a8861461
|
starthinker_ui/recipe/management/commands/recipe_usage.py
|
starthinker_ui/recipe/management/commands/recipe_usage.py
|
###########################################################################
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from starthinker_ui.account.models import Account
class Command(BaseCommand):
help = 'Prints recipe count and age by account.'
def handle(self, *args, **kwargs):
usage = []
for account in Account.objects.all():
usage.append({
'email':account.email,
'recipes':list(account.recipe_set.all().values_list('birthday', flat=True))
})
usage.sort(key=lambda u: len(u['recipes']), reverse=True)
for u in usage:
print ('{}, {}, {}'.format(u['email'], len(u['recipes']), max(u['recipes']) if u['recipes'] else ''))
|
Add recipe usage command for quick diagnostics.
|
Add recipe usage command for quick diagnostics.
PiperOrigin-RevId: 432485870
|
Python
|
apache-2.0
|
google/starthinker,google/starthinker,google/starthinker
|
Add recipe usage command for quick diagnostics.
PiperOrigin-RevId: 432485870
|
###########################################################################
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from starthinker_ui.account.models import Account
class Command(BaseCommand):
help = 'Prints recipe count and age by account.'
def handle(self, *args, **kwargs):
usage = []
for account in Account.objects.all():
usage.append({
'email':account.email,
'recipes':list(account.recipe_set.all().values_list('birthday', flat=True))
})
usage.sort(key=lambda u: len(u['recipes']), reverse=True)
for u in usage:
print ('{}, {}, {}'.format(u['email'], len(u['recipes']), max(u['recipes']) if u['recipes'] else ''))
|
<commit_before><commit_msg>Add recipe usage command for quick diagnostics.
PiperOrigin-RevId: 432485870<commit_after>
|
###########################################################################
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from starthinker_ui.account.models import Account
class Command(BaseCommand):
help = 'Prints recipe count and age by account.'
def handle(self, *args, **kwargs):
usage = []
for account in Account.objects.all():
usage.append({
'email':account.email,
'recipes':list(account.recipe_set.all().values_list('birthday', flat=True))
})
usage.sort(key=lambda u: len(u['recipes']), reverse=True)
for u in usage:
print ('{}, {}, {}'.format(u['email'], len(u['recipes']), max(u['recipes']) if u['recipes'] else ''))
|
Add recipe usage command for quick diagnostics.
PiperOrigin-RevId: 432485870###########################################################################
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from starthinker_ui.account.models import Account
class Command(BaseCommand):
help = 'Prints recipe count and age by account.'
def handle(self, *args, **kwargs):
usage = []
for account in Account.objects.all():
usage.append({
'email':account.email,
'recipes':list(account.recipe_set.all().values_list('birthday', flat=True))
})
usage.sort(key=lambda u: len(u['recipes']), reverse=True)
for u in usage:
print ('{}, {}, {}'.format(u['email'], len(u['recipes']), max(u['recipes']) if u['recipes'] else ''))
|
<commit_before><commit_msg>Add recipe usage command for quick diagnostics.
PiperOrigin-RevId: 432485870<commit_after>###########################################################################
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from starthinker_ui.account.models import Account
class Command(BaseCommand):
help = 'Prints recipe count and age by account.'
def handle(self, *args, **kwargs):
usage = []
for account in Account.objects.all():
usage.append({
'email':account.email,
'recipes':list(account.recipe_set.all().values_list('birthday', flat=True))
})
usage.sort(key=lambda u: len(u['recipes']), reverse=True)
for u in usage:
print ('{}, {}, {}'.format(u['email'], len(u['recipes']), max(u['recipes']) if u['recipes'] else ''))
|
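A minimal invocation sketch, assuming the command sits in a standard Django project with a manage.py entry point (the project layout is not shown in the commit):
python manage.py recipe_usage
Each output line is email, recipe count, and most recent recipe birthday, sorted by recipe count in descending order.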
|
e53a951ed98f460b603f43f6364d5d0a0f17a1ba
|
src/streamer.py
|
src/streamer.py
|
class pStream:
###PRIVATE FUNCTIONS
def _builder(self, expression):
self.STR = expression
return self
###OVERRIDES
def next(self):
return next(self.STR)
def __init__(self, iterable_thing):
self.STR = iterable_thing
def __iter__(self):
return iter(self.STR)
### TRANSFORMS
def map(self,function):
return self._builder(map(function, self.STR))
### CONSUMERS
def print_stream(self):
print(list(self.STR))
def consume(self, function):
function(self.STR)
def drain(self):
for x in self.STR:
pass
|
Add basic class structure, map functionality, and a set of consumer functions.
|
Add basic class structure, map functionality, and a set of consumer functions.
|
Python
|
mit
|
caffeine-potent/Streamer-Datastructure
|
Add basic class structure, map functionality, and a set of consumer functions.
|
class pStream:
###PRIVATE FUNCTIONS
def _builder(self, expression):
self.STR = expression
return self
###OVERRIDES
def next(self):
return next(self.STR)
def __init__(self, iterable_thing):
self.STR = iterable_thing
def __iter__(self):
return iter(self.STR)
### TRANSFORMS
def map(self,function):
return self._builder(map(function, self.STR))
### CONSUMERS
def print_stream(self):
print(list(self.STR))
def consume(self, function):
function(self.STR)
def drain(self):
for x in self.STR:
pass
|
<commit_before><commit_msg>Add basic class structure, map functionality, and a set of consumer functions.<commit_after>
|
class pStream:
###PRIVATE FUNCTIONS
def _builder(self, expression):
self.STR = expression
return self
###OVERRIDES
def next(self):
return next(self.STR)
def __init__(self, iterable_thing):
self.STR = iterable_thing
def __iter__(self):
return iter(self.STR)
### TRANSFORMS
def map(self,function):
return self._builder(map(function, self.STR))
### CONSUMERS
def print_stream(self):
print(list(self.STR))
def consume(self, function):
function(self.STR)
def drain(self):
for x in self.STR:
pass
|
Add basic class structure, map functionality, and a set of consumer functions.class pStream:
###PRIVATE FUNCTIONS
def _builder(self, expression):
self.STR = expression
return self
###OVERRIDES
def next(self):
return next(self.STR)
def __init__(self, iterable_thing):
self.STR = iterable_thing
def __iter__(self):
return iter(self.STR)
### TRANSFORMS
def map(self,function):
return self._builder(map(function, self.STR))
### CONSUMERS
def print_stream(self):
print(list(self.STR))
def consume(self, function):
function(self.STR)
def drain(self):
for x in self.STR:
pass
|
<commit_before><commit_msg>Add basic class structure, map functionality, and a set of consumer functions.<commit_after>class pStream:
###PRIVATE FUNCTIONS
def _builder(self, expression):
self.STR = expression
return self
###OVERRIDES
def next(self):
return next(self.STR)
def __init__(self, iterable_thing):
self.STR = iterable_thing
def __iter__(self):
return iter(self.STR)
### TRANSFORMS
def map(self,function):
return self._builder(map(function, self.STR))
### CONSUMERS
def print_stream(self):
print(list(self.STR))
def consume(self, function):
function(self.STR)
def drain(self):
for x in self.STR:
pass
|
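A short usage sketch for the class above; the values are illustrative only:
# Doubles each element lazily, then materializes and prints the stream.
s = pStream(range(5))
s.map(lambda x: x * 2).print_stream()  # prints [0, 2, 4, 6, 8]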
|
0b6709670179c0721b4f113d13bf34d9ac7715dd
|
test/indices.py
|
test/indices.py
|
import matplotlib.pyplot as plt
import numpy
from math import factorial
def binom(a,b):
return factorial(a) / (factorial(b)*factorial(a-b))
def stirling(n,k):
if n<=0 or n!=0 and n==k:
return 1
elif k<=0 or n<k:
return 0
elif n==0 and k==0:
return -1
else:
s = sum((-1)**(k-j)*binom(k,j)*j**n for j in range(k+1))
return s / factorial(k)
log = []
with open("indices.log") as indices:
next(indices)
for line in indices:
indices = line.split()[1:7]
size = len(set(indices))
log.append(size)
stirlings = numpy.array([stirling(6, k) for k in range(1,7)])
plt.hist(log, [0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5])
plt.plot(range(1,7), stirlings * len(log)/stirlings.sum())
plt.show()
|
Add a python plotter that compares the results of with Stirling numbers
|
Add a python plotter that compares the results of with Stirling numbers
|
Python
|
lgpl-2.1
|
Anaphory/parameterclone,Anaphory/parameterclone
|
Add a python plotter that compares the results of with Stirling numbers
|
import matplotlib.pyplot as plt
import numpy
from math import factorial
def binom(a,b):
return factorial(a) / (factorial(b)*factorial(a-b))
def stirling(n,k):
if n<=0 or n!=0 and n==k:
return 1
elif k<=0 or n<k:
return 0
elif n==0 and k==0:
return -1
else:
s = sum((-1)**(k-j)*binom(k,j)*j**n for j in range(k+1))
return s / factorial(k)
log = []
with open("indices.log") as indices:
next(indices)
for line in indices:
indices = line.split()[1:7]
size = len(set(indices))
log.append(size)
stirlings = numpy.array([stirling(6, k) for k in range(1,7)])
plt.hist(log, [0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5])
plt.plot(range(1,7), stirlings * len(log)/stirlings.sum())
plt.show()
|
<commit_before><commit_msg>Add a python plotter that compares the results of with Stirling numbers<commit_after>
|
import matplotlib.pyplot as plt
import numpy
from math import factorial
def binom(a,b):
return factorial(a) / (factorial(b)*factorial(a-b))
def stirling(n,k):
if n<=0 or n!=0 and n==k:
return 1
elif k<=0 or n<k:
return 0
elif n==0 and k==0:
return -1
else:
s = sum((-1)**(k-j)*binom(k,j)*j**n for j in range(k+1))
return s / factorial(k)
log = []
with open("indices.log") as indices:
next(indices)
for line in indices:
indices = line.split()[1:7]
size = len(set(indices))
log.append(size)
stirlings = numpy.array([stirling(6, k) for k in range(1,7)])
plt.hist(log, [0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5])
plt.plot(range(1,7), stirlings * len(log)/stirlings.sum())
plt.show()
|
Add a python plotter that compares the results of with Stirling numbersimport matplotlib.pyplot as plt
import numpy
from math import factorial
def binom(a,b):
return factorial(a) / (factorial(b)*factorial(a-b))
def stirling(n,k):
if n<=0 or n!=0 and n==k:
return 1
elif k<=0 or n<k:
return 0
elif n==0 and k==0:
return -1
else:
s = sum((-1)**(k-j)*binom(k,j)*j**n for j in range(k+1))
return s / factorial(k)
log = []
with open("indices.log") as indices:
next(indices)
for line in indices:
indices = line.split()[1:7]
size = len(set(indices))
log.append(size)
stirlings = numpy.array([stirling(6, k) for k in range(1,7)])
plt.hist(log, [0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5])
plt.plot(range(1,7), stirlings * len(log)/stirlings.sum())
plt.show()
|
<commit_before><commit_msg>Add a python plotter that compares the results of with Stirling numbers<commit_after>import matplotlib.pyplot as plt
import numpy
from math import factorial
def binom(a,b):
return factorial(a) / (factorial(b)*factorial(a-b))
def stirling(n,k):
if n<=0 or n!=0 and n==k:
return 1
elif k<=0 or n<k:
return 0
elif n==0 and k==0:
return -1
else:
s = sum((-1)**(k-j)*binom(k,j)*j**n for j in range(k+1))
return s / factorial(k)
log = []
with open("indices.log") as indices:
next(indices)
for line in indices:
indices = line.split()[1:7]
size = len(set(indices))
log.append(size)
stirlings = numpy.array([stirling(6, k) for k in range(1,7)])
plt.hist(log, [0.5,1.5,2.5,3.5,4.5,5.5,6.5,7.5])
plt.plot(range(1,7), stirlings * len(log)/stirlings.sum())
plt.show()
|
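For reference, the stirling helper above implements the explicit inclusion-exclusion formula for Stirling numbers of the second kind:
S(n, k) = \frac{1}{k!} \sum_{j=0}^{k} (-1)^{k-j} \binom{k}{j} j^{n}
which counts partitions of an n-element set into k non-empty blocks; the script compares the histogram of distinct-index counts per line against this distribution, rescaled to the sample size.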
|
7b179e4a420a3cd7a27f0f438a6eac462048bb93
|
py/brick-wall.py
|
py/brick-wall.py
|
import heapq
class Solution(object):
def leastBricks(self, wall):
"""
:type wall: List[List[int]]
:rtype: int
"""
n_row = len(wall)
heap = [(wall[i][0], i, 0) for i in xrange(n_row)]
heapq.heapify(heap)
max_noncross = 0
while True:
l, idx, offset = heapq.heappop(heap)
cur_l = l
if offset == len(wall[idx]) - 1:
break
heapq.heappush(heap, (l + wall[idx][offset + 1], idx, offset + 1))
cnt = 1
while True:
ol, oidx, ooffset = heapq.heappop(heap)
if ol == l:
cnt += 1
heapq.heappush(heap, (ol + wall[oidx][ooffset + 1], oidx, ooffset + 1))
elif ol > l:
heapq.heappush(heap, (ol, oidx, ooffset))
break
max_noncross = max(max_noncross, cnt)
return n_row - max_noncross
|
Add py solution for 554. Brick Wall
|
Add py solution for 554. Brick Wall
554. Brick Wall: https://leetcode.com/problems/brick-wall/
Approach1:
O(n_brick * log(n_row)) Use heap to find the least length of accumulated row
and find how many rows can match such length
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 554. Brick Wall
554. Brick Wall: https://leetcode.com/problems/brick-wall/
Approach1:
O(n_brick * log(n_row)) Use heap to find the least length of accumulated row
and find how many rows can match such length
|
import heapq
class Solution(object):
def leastBricks(self, wall):
"""
:type wall: List[List[int]]
:rtype: int
"""
n_row = len(wall)
heap = [(wall[i][0], i, 0) for i in xrange(n_row)]
heapq.heapify(heap)
max_noncross = 0
while True:
l, idx, offset = heapq.heappop(heap)
cur_l = l
if offset == len(wall[idx]) - 1:
break
heapq.heappush(heap, (l + wall[idx][offset + 1], idx, offset + 1))
cnt = 1
while True:
ol, oidx, ooffset = heapq.heappop(heap)
if ol == l:
cnt += 1
heapq.heappush(heap, (ol + wall[oidx][ooffset + 1], oidx, ooffset + 1))
elif ol > l:
heapq.heappush(heap, (ol, oidx, ooffset))
break
max_noncross = max(max_noncross, cnt)
return n_row - max_noncross
|
<commit_before><commit_msg>Add py solution for 554. Brick Wall
554. Brick Wall: https://leetcode.com/problems/brick-wall/
Approach1:
O(n_brick * log(n_row)) Use heap to find the least length of accumulated row
and find how many rows can match such length<commit_after>
|
import heapq
class Solution(object):
def leastBricks(self, wall):
"""
:type wall: List[List[int]]
:rtype: int
"""
n_row = len(wall)
heap = [(wall[i][0], i, 0) for i in xrange(n_row)]
heapq.heapify(heap)
max_noncross = 0
while True:
l, idx, offset = heapq.heappop(heap)
cur_l = l
if offset == len(wall[idx]) - 1:
break
heapq.heappush(heap, (l + wall[idx][offset + 1], idx, offset + 1))
cnt = 1
while True:
ol, oidx, ooffset = heapq.heappop(heap)
if ol == l:
cnt += 1
heapq.heappush(heap, (ol + wall[oidx][ooffset + 1], oidx, ooffset + 1))
elif ol > l:
heapq.heappush(heap, (ol, oidx, ooffset))
break
max_noncross = max(max_noncross, cnt)
return n_row - max_noncross
|
Add py solution for 554. Brick Wall
554. Brick Wall: https://leetcode.com/problems/brick-wall/
Approach1:
O(n_brick * log(n_row)) Use heap to find the least length of accumulated row
and find how many rows can match such lengthimport heapq
class Solution(object):
def leastBricks(self, wall):
"""
:type wall: List[List[int]]
:rtype: int
"""
n_row = len(wall)
heap = [(wall[i][0], i, 0) for i in xrange(n_row)]
heapq.heapify(heap)
max_noncross = 0
while True:
l, idx, offset = heapq.heappop(heap)
cur_l = l
if offset == len(wall[idx]) - 1:
break
heapq.heappush(heap, (l + wall[idx][offset + 1], idx, offset + 1))
cnt = 1
while True:
ol, oidx, ooffset = heapq.heappop(heap)
if ol == l:
cnt += 1
heapq.heappush(heap, (ol + wall[oidx][ooffset + 1], oidx, ooffset + 1))
elif ol > l:
heapq.heappush(heap, (ol, oidx, ooffset))
break
max_noncross = max(max_noncross, cnt)
return n_row - max_noncross
|
<commit_before><commit_msg>Add py solution for 554. Brick Wall
554. Brick Wall: https://leetcode.com/problems/brick-wall/
Approach1:
O(n_brick * log(n_row)) Use heap to find the least length of accumulated row
and find how many rows can match such length<commit_after>import heapq
class Solution(object):
def leastBricks(self, wall):
"""
:type wall: List[List[int]]
:rtype: int
"""
n_row = len(wall)
heap = [(wall[i][0], i, 0) for i in xrange(n_row)]
heapq.heapify(heap)
max_noncross = 0
while True:
l, idx, offset = heapq.heappop(heap)
cur_l = l
if offset == len(wall[idx]) - 1:
break
heapq.heappush(heap, (l + wall[idx][offset + 1], idx, offset + 1))
cnt = 1
while True:
ol, oidx, ooffset = heapq.heappop(heap)
if ol == l:
cnt += 1
heapq.heappush(heap, (ol + wall[oidx][ooffset + 1], oidx, ooffset + 1))
elif ol > l:
heapq.heappush(heap, (ol, oidx, ooffset))
break
max_noncross = max(max_noncross, cnt)
return n_row - max_noncross
|
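A quick sanity check using the sample wall from the problem statement; the expected answer is 2, since the edge at offset 4 is shared by four of the six rows:
wall = [[1, 2, 2, 1], [3, 1, 2], [1, 3, 2], [2, 4], [3, 1, 2], [1, 3, 1, 1]]
print(Solution().leastBricks(wall))  # 2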