| Column | Type |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 0–2.94k |
| new_contents | stringlengths 1–4.43k |
| subject | stringlengths 15–444 |
| message | stringlengths 16–3.45k |
| lang | stringclasses 1 value |
| license | stringclasses 13 values |
| repos | stringlengths 5–43.2k |
| prompt | stringlengths 17–4.58k |
| response | stringlengths 1–4.43k |
| prompt_tagged | stringlengths 58–4.62k |
| response_tagged | stringlengths 1–4.43k |
| text | stringlengths 132–7.29k |
| text_tagged | stringlengths 173–7.33k |
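The stringlengths/stringclasses summaries above follow the Hugging Face dataset-viewer convention, and the records below suggest how the derived columns are built: prompt appears to be old_contents followed by the commit message, response matches new_contents, and text/text_tagged concatenate the two (the _tagged variants wrap the pieces in <commit_before>, <commit_msg>, and <commit_after> markers). A minimal sketch of loading and inspecting one record, assuming the data is published as a `datasets`-compatible dataset; the identifier "user/commit-dataset" is a placeholder, not the real name.

```python
# Hypothetical loading sketch for a dataset with the schema listed above.
# "user/commit-dataset" is a placeholder identifier; substitute the real one.
from datasets import load_dataset

ds = load_dataset("user/commit-dataset", split="train")
print(ds.column_names)            # commit, old_file, new_file, old_contents, ...

row = ds[0]
print(row["subject"])             # e.g. "Remove separate execute mif command."
print(row["new_contents"][:200])  # start of the post-commit file contents
```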
b595e1be84159c27b9d9bb81bbd66b78e5c084ce
|
pyoommf/small_example.py
|
pyoommf/small_example.py
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
#sim.execute_mif()
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
|
Remove separate execute mif command.
|
Remove separate execute mif command.
|
Python
|
bsd-2-clause
|
ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
#sim.execute_mif()
Remove separate execute mif command.
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
|
<commit_before>from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
#sim.execute_mif()
<commit_msg>Remove separate execute mif command.<commit_after>
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
#sim.execute_mif()
Remove separate execute mif command.from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
|
<commit_before>from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
#sim.execute_mif()
<commit_msg>Remove separate execute mif command.<commit_after>from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example')
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
|
b6d1984aa264c785c31f90485ffefa88fd0149dd
|
takeyourmeds/reminders/migrations/0002_reminder_type.py
|
takeyourmeds/reminders/migrations/0002_reminder_type.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-16 09:44
from __future__ import unicode_literals
from django.db import migrations, models
def populate_type_field(apps, schema_editor):
from takeyourmeds.reminders.enums import TypeEnum
Reminder = apps.get_model('reminders', 'Reminder')
for x in Reminder.objects.all():
x.type = TypeEnum.call if x.audio_url else TypeEnum.message
x.save()
class Migration(migrations.Migration):
dependencies = [
('reminders', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='reminder',
name='type',
field=models.IntegerField(choices=[(b'call', 10), (b'message', 20)], default=10),
preserve_default=False,
),
migrations.RunPython(
migrations.RunPython.noop,
populate_type_field,
),
]
|
Move to a type field
|
Move to a type field
|
Python
|
mit
|
takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web
|
Move to a type field
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-16 09:44
from __future__ import unicode_literals
from django.db import migrations, models
def populate_type_field(apps, schema_editor):
from takeyourmeds.reminders.enums import TypeEnum
Reminder = apps.get_model('reminders', 'Reminder')
for x in Reminder.objects.all():
x.type = TypeEnum.call if x.audio_url else TypeEnum.message
x.save()
class Migration(migrations.Migration):
dependencies = [
('reminders', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='reminder',
name='type',
field=models.IntegerField(choices=[(b'call', 10), (b'message', 20)], default=10),
preserve_default=False,
),
migrations.RunPython(
migrations.RunPython.noop,
populate_type_field,
),
]
|
<commit_before><commit_msg>Move to a type field<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-16 09:44
from __future__ import unicode_literals
from django.db import migrations, models
def populate_type_field(apps, schema_editor):
from takeyourmeds.reminders.enums import TypeEnum
Reminder = apps.get_model('reminders', 'Reminder')
for x in Reminder.objects.all():
x.type = TypeEnum.call if x.audio_url else TypeEnum.message
x.save()
class Migration(migrations.Migration):
dependencies = [
('reminders', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='reminder',
name='type',
field=models.IntegerField(choices=[(b'call', 10), (b'message', 20)], default=10),
preserve_default=False,
),
migrations.RunPython(
migrations.RunPython.noop,
populate_type_field,
),
]
|
Move to a type field# -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-16 09:44
from __future__ import unicode_literals
from django.db import migrations, models
def populate_type_field(apps, schema_editor):
from takeyourmeds.reminders.enums import TypeEnum
Reminder = apps.get_model('reminders', 'Reminder')
for x in Reminder.objects.all():
x.type = TypeEnum.call if x.audio_url else TypeEnum.message
x.save()
class Migration(migrations.Migration):
dependencies = [
('reminders', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='reminder',
name='type',
field=models.IntegerField(choices=[(b'call', 10), (b'message', 20)], default=10),
preserve_default=False,
),
migrations.RunPython(
migrations.RunPython.noop,
populate_type_field,
),
]
|
<commit_before><commit_msg>Move to a type field<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-16 09:44
from __future__ import unicode_literals
from django.db import migrations, models
def populate_type_field(apps, schema_editor):
from takeyourmeds.reminders.enums import TypeEnum
Reminder = apps.get_model('reminders', 'Reminder')
for x in Reminder.objects.all():
x.type = TypeEnum.call if x.audio_url else TypeEnum.message
x.save()
class Migration(migrations.Migration):
dependencies = [
('reminders', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='reminder',
name='type',
field=models.IntegerField(choices=[(b'call', 10), (b'message', 20)], default=10),
preserve_default=False,
),
migrations.RunPython(
migrations.RunPython.noop,
populate_type_field,
),
]
|
|
b9574fd59e2aba2d538fd11b172b3d14fde29b4d
|
Lib/test/test_readline.py
|
Lib/test/test_readline.py
|
"""
Very minimal unittests for parts of the readline module.
These tests were added to check that the libedit emulation on OSX and
the "real" readline have the same interface for history manipulation. That's
why the tests cover only a small subset of the interface.
"""
import unittest
from test.test_support import run_unittest
import readline
class TestHistoryManipulation (unittest.TestCase):
def testHistoryUpdates(self):
readline.clear_history()
readline.add_history("first line")
readline.add_history("second line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "first line")
self.assertEqual(readline.get_history_item(2), "second line")
readline.replace_history_item(0, "replaced line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "replaced line")
self.assertEqual(readline.get_history_item(2), "second line")
self.assertEqual(readline.get_current_history_length(), 2)
readline.remove_history_item(0)
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "second line")
self.assertEqual(readline.get_current_history_length(), 1)
def test_main():
run_unittest(TestHistoryManipulation)
if __name__ == "__main__":
test_main()
|
Add testcase for readline-using-libedit on OSX
|
Add testcase for readline-using-libedit on OSX
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Add testcase for readline-using-libedit on OSX
|
"""
Very minimal unittests for parts of the readline module.
These tests were added to check that the libedit emulation on OSX and
the "real" readline have the same interface for history manipulation. That's
why the tests cover only a small subset of the interface.
"""
import unittest
from test.test_support import run_unittest
import readline
class TestHistoryManipulation (unittest.TestCase):
def testHistoryUpdates(self):
readline.clear_history()
readline.add_history("first line")
readline.add_history("second line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "first line")
self.assertEqual(readline.get_history_item(2), "second line")
readline.replace_history_item(0, "replaced line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "replaced line")
self.assertEqual(readline.get_history_item(2), "second line")
self.assertEqual(readline.get_current_history_length(), 2)
readline.remove_history_item(0)
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "second line")
self.assertEqual(readline.get_current_history_length(), 1)
def test_main():
run_unittest(TestHistoryManipulation)
if __name__ == "__main__":
test_main()
|
<commit_before><commit_msg>Add testcase for readline-using-libedit on OSX<commit_after>
|
"""
Very minimal unittests for parts of the readline module.
These tests were added to check that the libedit emulation on OSX and
the "real" readline have the same interface for history manipulation. That's
why the tests cover only a small subset of the interface.
"""
import unittest
from test.test_support import run_unittest
import readline
class TestHistoryManipulation (unittest.TestCase):
def testHistoryUpdates(self):
readline.clear_history()
readline.add_history("first line")
readline.add_history("second line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "first line")
self.assertEqual(readline.get_history_item(2), "second line")
readline.replace_history_item(0, "replaced line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "replaced line")
self.assertEqual(readline.get_history_item(2), "second line")
self.assertEqual(readline.get_current_history_length(), 2)
readline.remove_history_item(0)
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "second line")
self.assertEqual(readline.get_current_history_length(), 1)
def test_main():
run_unittest(TestHistoryManipulation)
if __name__ == "__main__":
test_main()
|
Add testcase for readline-using-libedit on OSX"""
Very minimal unittests for parts of the readline module.
These tests were added to check that the libedit emulation on OSX and
the "real" readline have the same interface for history manipulation. That's
why the tests cover only a small subset of the interface.
"""
import unittest
from test.test_support import run_unittest
import readline
class TestHistoryManipulation (unittest.TestCase):
def testHistoryUpdates(self):
readline.clear_history()
readline.add_history("first line")
readline.add_history("second line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "first line")
self.assertEqual(readline.get_history_item(2), "second line")
readline.replace_history_item(0, "replaced line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "replaced line")
self.assertEqual(readline.get_history_item(2), "second line")
self.assertEqual(readline.get_current_history_length(), 2)
readline.remove_history_item(0)
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "second line")
self.assertEqual(readline.get_current_history_length(), 1)
def test_main():
run_unittest(TestHistoryManipulation)
if __name__ == "__main__":
test_main()
|
<commit_before><commit_msg>Add testcase for readline-using-libedit on OSX<commit_after>"""
Very minimal unittests for parts of the readline module.
These tests were added to check that the libedit emulation on OSX and
the "real" readline have the same interface for history manipulation. That's
why the tests cover only a small subset of the interface.
"""
import unittest
from test.test_support import run_unittest
import readline
class TestHistoryManipulation (unittest.TestCase):
def testHistoryUpdates(self):
readline.clear_history()
readline.add_history("first line")
readline.add_history("second line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "first line")
self.assertEqual(readline.get_history_item(2), "second line")
readline.replace_history_item(0, "replaced line")
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "replaced line")
self.assertEqual(readline.get_history_item(2), "second line")
self.assertEqual(readline.get_current_history_length(), 2)
readline.remove_history_item(0)
self.assertEqual(readline.get_history_item(0), None)
self.assertEqual(readline.get_history_item(1), "second line")
self.assertEqual(readline.get_current_history_length(), 1)
def test_main():
run_unittest(TestHistoryManipulation)
if __name__ == "__main__":
test_main()
|
|
47f81e5336676bddb8f02d5cd240128889315d59
|
CodeFights/crackingPassword.py
|
CodeFights/crackingPassword.py
|
#!/usr/local/bin/python
# Code Fights Cracking Password Problem
from itertools import product
def crackingPassword(digits, k, d):
def createNumber(digs):
return "".join(map(str, digs))
return sorted([s for s in [''.join(digs) for digs in
product(createNumber(digits), repeat=k)] if int(s) % d == 0])
# Alternative solution:
# return list(filter(lambda x: int(x) % d == 0, map(createNumber,
# product(sorted(digits), repeat = k))))
def main():
tests = [
[[1, 5, 2], 2, 3, ["12", "15", "21", "51"]],
[[4, 6, 0, 3], 4, 13,
["0000", "0364", "0403", "0663", "3003", "3406", "3640", "3666",
"4004", "4030", "4043", "4303", "4433", "4446", "6006", "6344",
"6604", "6630", "6643"]],
[[1], 4, 11, ["1111"]],
[[8, 9], 3, 10, []],
[[4, 6, 0], 1, 7, ["0"]],
[[3], 9, 3, ["333333333"]]
]
for t in tests:
res = crackingPassword(t[0], t[1], t[2])
ans = t[3]
if ans == res:
print("PASSED: crackingPassword({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print(("FAILED: crackingPassword({}, {}, {}) returned {},"
"answer: {}").format(t[0], t[1], t[2], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights cracking password problem
|
Solve Code Fights cracking password problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights cracking password problem
|
#!/usr/local/bin/python
# Code Fights Cracking Password Problem
from itertools import product
def crackingPassword(digits, k, d):
def createNumber(digs):
return "".join(map(str, digs))
return sorted([s for s in [''.join(digs) for digs in
product(createNumber(digits), repeat=k)] if int(s) % d == 0])
# Alternative solution:
# return list(filter(lambda x: int(x) % d == 0, map(createNumber,
# product(sorted(digits), repeat = k))))
def main():
tests = [
[[1, 5, 2], 2, 3, ["12", "15", "21", "51"]],
[[4, 6, 0, 3], 4, 13,
["0000", "0364", "0403", "0663", "3003", "3406", "3640", "3666",
"4004", "4030", "4043", "4303", "4433", "4446", "6006", "6344",
"6604", "6630", "6643"]],
[[1], 4, 11, ["1111"]],
[[8, 9], 3, 10, []],
[[4, 6, 0], 1, 7, ["0"]],
[[3], 9, 3, ["333333333"]]
]
for t in tests:
res = crackingPassword(t[0], t[1], t[2])
ans = t[3]
if ans == res:
print("PASSED: crackingPassword({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print(("FAILED: crackingPassword({}, {}, {}) returned {},"
"answer: {}").format(t[0], t[1], t[2], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights cracking password problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Cracking Password Problem
from itertools import product
def crackingPassword(digits, k, d):
def createNumber(digs):
return "".join(map(str, digs))
return sorted([s for s in [''.join(digs) for digs in
product(createNumber(digits), repeat=k)] if int(s) % d == 0])
# Alternative solution:
# return list(filter(lambda x: int(x) % d == 0, map(createNumber,
# product(sorted(digits), repeat = k))))
def main():
tests = [
[[1, 5, 2], 2, 3, ["12", "15", "21", "51"]],
[[4, 6, 0, 3], 4, 13,
["0000", "0364", "0403", "0663", "3003", "3406", "3640", "3666",
"4004", "4030", "4043", "4303", "4433", "4446", "6006", "6344",
"6604", "6630", "6643"]],
[[1], 4, 11, ["1111"]],
[[8, 9], 3, 10, []],
[[4, 6, 0], 1, 7, ["0"]],
[[3], 9, 3, ["333333333"]]
]
for t in tests:
res = crackingPassword(t[0], t[1], t[2])
ans = t[3]
if ans == res:
print("PASSED: crackingPassword({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print(("FAILED: crackingPassword({}, {}, {}) returned {},"
"answer: {}").format(t[0], t[1], t[2], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights cracking password problem#!/usr/local/bin/python
# Code Fights Cracking Password Problem
from itertools import product
def crackingPassword(digits, k, d):
def createNumber(digs):
return "".join(map(str, digs))
return sorted([s for s in [''.join(digs) for digs in
product(createNumber(digits), repeat=k)] if int(s) % d == 0])
# Alternative solution:
# return list(filter(lambda x: int(x) % d == 0, map(createNumber,
# product(sorted(digits), repeat = k))))
def main():
tests = [
[[1, 5, 2], 2, 3, ["12", "15", "21", "51"]],
[[4, 6, 0, 3], 4, 13,
["0000", "0364", "0403", "0663", "3003", "3406", "3640", "3666",
"4004", "4030", "4043", "4303", "4433", "4446", "6006", "6344",
"6604", "6630", "6643"]],
[[1], 4, 11, ["1111"]],
[[8, 9], 3, 10, []],
[[4, 6, 0], 1, 7, ["0"]],
[[3], 9, 3, ["333333333"]]
]
for t in tests:
res = crackingPassword(t[0], t[1], t[2])
ans = t[3]
if ans == res:
print("PASSED: crackingPassword({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print(("FAILED: crackingPassword({}, {}, {}) returned {},"
"answer: {}").format(t[0], t[1], t[2], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights cracking password problem<commit_after>#!/usr/local/bin/python
# Code Fights Cracking Password Problem
from itertools import product
def crackingPassword(digits, k, d):
def createNumber(digs):
return "".join(map(str, digs))
return sorted([s for s in [''.join(digs) for digs in
product(createNumber(digits), repeat=k)] if int(s) % d == 0])
# Alternative solution:
# return list(filter(lambda x: int(x) % d == 0, map(createNumber,
# product(sorted(digits), repeat = k))))
def main():
tests = [
[[1, 5, 2], 2, 3, ["12", "15", "21", "51"]],
[[4, 6, 0, 3], 4, 13,
["0000", "0364", "0403", "0663", "3003", "3406", "3640", "3666",
"4004", "4030", "4043", "4303", "4433", "4446", "6006", "6344",
"6604", "6630", "6643"]],
[[1], 4, 11, ["1111"]],
[[8, 9], 3, 10, []],
[[4, 6, 0], 1, 7, ["0"]],
[[3], 9, 3, ["333333333"]]
]
for t in tests:
res = crackingPassword(t[0], t[1], t[2])
ans = t[3]
if ans == res:
print("PASSED: crackingPassword({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print(("FAILED: crackingPassword({}, {}, {}) returned {},"
"answer: {}").format(t[0], t[1], t[2], res, ans))
if __name__ == '__main__':
main()
|
|
24709b6221c43ef61699b2055a83108dc2e739db
|
reddit_adzerk/tests/unit/test_adzerkpromote.py
|
reddit_adzerk/tests/unit/test_adzerkpromote.py
|
from mock import MagicMock, Mock, patch
from random import randint
from r2.tests import RedditTestCase
from reddit_adzerk.adzerkpromote import flight_is_active
class TestIsActive(RedditTestCase):
def test_flight_is_active(self):
"""
Assert that `flight_is_active` returns `True` only if all kwargs are
`False`.
"""
kwarg_keys = (
'needs_approval',
'is_paused',
'needs_payment',
'is_terminated',
'is_deleted',
'is_overdelivered',
)
kwargs = dict()
for key in kwarg_keys:
kwargs[key] = False
# Should return True only if all kwargs have value of False
self.assertTrue(flight_is_active(**kwargs))
# If any kwarg value is True, flight_is_active should return False
random_kwarg_key = kwarg_keys[randint(0, len(kwarg_keys) - 1)]
kwargs[random_kwarg_key] = True
self.assertFalse(flight_is_active(**kwargs))
# If all kwarg values are True, flight_is_active should return False
for key in kwarg_keys:
kwargs[key] = True
self.assertFalse(flight_is_active(**kwargs))
|
Add unit tests for flight_is_active
|
Add unit tests for flight_is_active
|
Python
|
bsd-3-clause
|
madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk
|
Add unit tests for flight_is_active
|
from mock import MagicMock, Mock, patch
from random import randint
from r2.tests import RedditTestCase
from reddit_adzerk.adzerkpromote import flight_is_active
class TestIsActive(RedditTestCase):
def test_flight_is_active(self):
"""
Assert that `flight_is_active` returns `True` only if all kwargs are
`False`.
"""
kwarg_keys = (
'needs_approval',
'is_paused',
'needs_payment',
'is_terminated',
'is_deleted',
'is_overdelivered',
)
kwargs = dict()
for key in kwarg_keys:
kwargs[key] = False
# Should return True only if all kwargs have value of False
self.assertTrue(flight_is_active(**kwargs))
# If any kwarg value is True, flight_is_active should return False
random_kwarg_key = kwarg_keys[randint(0, len(kwarg_keys) - 1)]
kwargs[random_kwarg_key] = True
self.assertFalse(flight_is_active(**kwargs))
# If all kwarg values are True, flight_is_active should return False
for key in kwarg_keys:
kwargs[key] = True
self.assertFalse(flight_is_active(**kwargs))
|
<commit_before><commit_msg>Add unit tests for flight_is_active<commit_after>
|
from mock import MagicMock, Mock, patch
from random import randint
from r2.tests import RedditTestCase
from reddit_adzerk.adzerkpromote import flight_is_active
class TestIsActive(RedditTestCase):
def test_flight_is_active(self):
"""
Assert that `flight_is_active` returns `True` only if all kwargs are
`False`.
"""
kwarg_keys = (
'needs_approval',
'is_paused',
'needs_payment',
'is_terminated',
'is_deleted',
'is_overdelivered',
)
kwargs = dict()
for key in kwarg_keys:
kwargs[key] = False
# Should return True only if all kwargs have value of False
self.assertTrue(flight_is_active(**kwargs))
# If any kwarg value is True, flight_is_active should return False
random_kwarg_key = kwarg_keys[randint(0, len(kwarg_keys) - 1)]
kwargs[random_kwarg_key] = True
self.assertFalse(flight_is_active(**kwargs))
# If all kwarg values are True, flight_is_active should return False
for key in kwarg_keys:
kwargs[key] = True
self.assertFalse(flight_is_active(**kwargs))
|
Add unit tests for flight_is_activefrom mock import MagicMock, Mock, patch
from random import randint
from r2.tests import RedditTestCase
from reddit_adzerk.adzerkpromote import flight_is_active
class TestIsActive(RedditTestCase):
def test_flight_is_active(self):
"""
Assert that `flight_is_active` returns `True` only if all kwargs are
`False`.
"""
kwarg_keys = (
'needs_approval',
'is_paused',
'needs_payment',
'is_terminated',
'is_deleted',
'is_overdelivered',
)
kwargs = dict()
for key in kwarg_keys:
kwargs[key] = False
# Should return True only if all kwargs have value of False
self.assertTrue(flight_is_active(**kwargs))
# If any kwarg value is True, flight_is_active should return False
random_kwarg_key = kwarg_keys[randint(0, len(kwarg_keys) - 1)]
kwargs[random_kwarg_key] = True
self.assertFalse(flight_is_active(**kwargs))
# If all kwarg values are True, flight_is_active should return False
for key in kwarg_keys:
kwargs[key] = True
self.assertFalse(flight_is_active(**kwargs))
|
<commit_before><commit_msg>Add unit tests for flight_is_active<commit_after>from mock import MagicMock, Mock, patch
from random import randint
from r2.tests import RedditTestCase
from reddit_adzerk.adzerkpromote import flight_is_active
class TestIsActive(RedditTestCase):
def test_flight_is_active(self):
"""
Assert that `flight_is_active` returns `True` only if all kwargs are
`False`.
"""
kwarg_keys = (
'needs_approval',
'is_paused',
'needs_payment',
'is_terminated',
'is_deleted',
'is_overdelivered',
)
kwargs = dict()
for key in kwarg_keys:
kwargs[key] = False
# Should return True only if all kwargs have value of False
self.assertTrue(flight_is_active(**kwargs))
# If any kwarg value is True, flight_is_active should return False
random_kwarg_key = kwarg_keys[randint(0, len(kwarg_keys) - 1)]
kwargs[random_kwarg_key] = True
self.assertFalse(flight_is_active(**kwargs))
# If all kwarg values are True, flight_is_active should return False
for key in kwarg_keys:
kwargs[key] = True
self.assertFalse(flight_is_active(**kwargs))
|
|
b10202ab7e5c79af8b0925ceed9ccbc95e52c7a0
|
pipeline_docs/pipeline_intervals/trackers/GeneProfile.py
|
pipeline_docs/pipeline_intervals/trackers/GeneProfile.py
|
import os, sys, re, types, itertools
import matplotlib.pyplot as plt
import numpy
import numpy.ma
import Stats
import Histogram
import Annotations
from SphinxReport.Tracker import *
from IntervalReport import *
##################################################################################
##################################################################################
##################################################################################
## Make line plots of the gene read profiles .....
##################################################################################
class MultiProfilePlot(Annotations.AnnotationSlicer, IntervalTracker):
"""for each interval, return the GC percentage and the number of counts"""
mXLabel = "distance / bases"
mPattern = "_intervals$"
mColumn1 = "upstream"
mColumn2 = "exons"
mColumn3 = "downstream"
mTable1 = "withoverlap_geneprofile_counts"
mTable2 = "woutoverlap_geneprofile_counts"
mAnnotations = "annotations"
def __call__(self, track, slice = None ):
annotations = self.mAnnotations
table1 = self.mTable1
table2 = self.mTable2
column1 = self.mColumn1
column2 = self.mColumn2
column3 = self.mColumn3
if not slice or slice == "all":
withdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table1)s""" % locals() )
woutdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table2)s""" % locals() )
withd = withdata[column1]+withdata[column2]+withdata[column3]
witho = woutdata[column1]+woutdata[column2]+woutdata[column3]
wd = odict( (("position",range(0,len(withd))),("density",withd)))
wo = odict( (("position",range(0,len(witho))),("density",witho)))
data = odict( (("with_overlap",wd), ("without_overlap",wo)) )
return data
|
Add new tracker for by overlap status gene profile plots in pipeline_intervals
|
Add new tracker for by overlap status gene profile plots in pipeline_intervals
|
Python
|
mit
|
CGATOxford/CGATPipelines,CGATOxford/CGATPipelines,AntonioJBT/CGATPipeline_core,AntonioJBT/CGATPipeline_core,CGATOxford/CGATPipelines,AntonioJBT/CGATPipeline_core,CGATOxford/CGATPipelines,CGATOxford/CGATPipelines,AntonioJBT/CGATPipeline_core
|
Add new tracker for by overlap status gene profile plots in pipeline_intervals
|
import os, sys, re, types, itertools
import matplotlib.pyplot as plt
import numpy
import numpy.ma
import Stats
import Histogram
import Annotations
from SphinxReport.Tracker import *
from IntervalReport import *
##################################################################################
##################################################################################
##################################################################################
## Make line plots of the gene read profiles .....
##################################################################################
class MultiProfilePlot(Annotations.AnnotationSlicer, IntervalTracker):
"""for each interval, return the GC percentage and the number of counts"""
mXLabel = "distance / bases"
mPattern = "_intervals$"
mColumn1 = "upstream"
mColumn2 = "exons"
mColumn3 = "downstream"
mTable1 = "withoverlap_geneprofile_counts"
mTable2 = "woutoverlap_geneprofile_counts"
mAnnotations = "annotations"
def __call__(self, track, slice = None ):
annotations = self.mAnnotations
table1 = self.mTable1
table2 = self.mTable2
column1 = self.mColumn1
column2 = self.mColumn2
column3 = self.mColumn3
if not slice or slice == "all":
withdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table1)s""" % locals() )
woutdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table2)s""" % locals() )
withd = withdata[column1]+withdata[column2]+withdata[column3]
witho = woutdata[column1]+woutdata[column2]+woutdata[column3]
wd = odict( (("position",range(0,len(withd))),("density",withd)))
wo = odict( (("position",range(0,len(witho))),("density",witho)))
data = odict( (("with_overlap",wd), ("without_overlap",wo)) )
return data
|
<commit_before><commit_msg>Add new tracker for by overlap status gene profile plots in pipeline_intervals<commit_after>
|
import os, sys, re, types, itertools
import matplotlib.pyplot as plt
import numpy
import numpy.ma
import Stats
import Histogram
import Annotations
from SphinxReport.Tracker import *
from IntervalReport import *
##################################################################################
##################################################################################
##################################################################################
## Make line plots of the gene read profiles .....
##################################################################################
class MultiProfilePlot(Annotations.AnnotationSlicer, IntervalTracker):
"""for each interval, return the GC percentage and the number of counts"""
mXLabel = "distance / bases"
mPattern = "_intervals$"
mColumn1 = "upstream"
mColumn2 = "exons"
mColumn3 = "downstream"
mTable1 = "withoverlap_geneprofile_counts"
mTable2 = "woutoverlap_geneprofile_counts"
mAnnotations = "annotations"
def __call__(self, track, slice = None ):
annotations = self.mAnnotations
table1 = self.mTable1
table2 = self.mTable2
column1 = self.mColumn1
column2 = self.mColumn2
column3 = self.mColumn3
if not slice or slice == "all":
withdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table1)s""" % locals() )
woutdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table2)s""" % locals() )
withd = withdata[column1]+withdata[column2]+withdata[column3]
witho = woutdata[column1]+woutdata[column2]+woutdata[column3]
wd = odict( (("position",range(0,len(withd))),("density",withd)))
wo = odict( (("position",range(0,len(witho))),("density",witho)))
data = odict( (("with_overlap",wd), ("without_overlap",wo)) )
return data
|
Add new tracker for by overlap status gene profile plots in pipeline_intervalsimport os, sys, re, types, itertools
import matplotlib.pyplot as plt
import numpy
import numpy.ma
import Stats
import Histogram
import Annotations
from SphinxReport.Tracker import *
from IntervalReport import *
##################################################################################
##################################################################################
##################################################################################
## Make line plots of the gene read profiles .....
##################################################################################
class MultiProfilePlot(Annotations.AnnotationSlicer, IntervalTracker):
"""for each interval, return the GC percentage and the number of counts"""
mXLabel = "distance / bases"
mPattern = "_intervals$"
mColumn1 = "upstream"
mColumn2 = "exons"
mColumn3 = "downstream"
mTable1 = "withoverlap_geneprofile_counts"
mTable2 = "woutoverlap_geneprofile_counts"
mAnnotations = "annotations"
def __call__(self, track, slice = None ):
annotations = self.mAnnotations
table1 = self.mTable1
table2 = self.mTable2
column1 = self.mColumn1
column2 = self.mColumn2
column3 = self.mColumn3
if not slice or slice == "all":
withdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table1)s""" % locals() )
woutdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table2)s""" % locals() )
withd = withdata[column1]+withdata[column2]+withdata[column3]
witho = woutdata[column1]+woutdata[column2]+woutdata[column3]
wd = odict( (("position",range(0,len(withd))),("density",withd)))
wo = odict( (("position",range(0,len(witho))),("density",witho)))
data = odict( (("with_overlap",wd), ("without_overlap",wo)) )
return data
|
<commit_before><commit_msg>Add new tracker for by overlap status gene profile plots in pipeline_intervals<commit_after>import os, sys, re, types, itertools
import matplotlib.pyplot as plt
import numpy
import numpy.ma
import Stats
import Histogram
import Annotations
from SphinxReport.Tracker import *
from IntervalReport import *
##################################################################################
##################################################################################
##################################################################################
## Make line plots of the gene read profiles .....
##################################################################################
class MultiProfilePlot(Annotations.AnnotationSlicer, IntervalTracker):
"""for each interval, return the GC percentage and the number of counts"""
mXLabel = "distance / bases"
mPattern = "_intervals$"
mColumn1 = "upstream"
mColumn2 = "exons"
mColumn3 = "downstream"
mTable1 = "withoverlap_geneprofile_counts"
mTable2 = "woutoverlap_geneprofile_counts"
mAnnotations = "annotations"
def __call__(self, track, slice = None ):
annotations = self.mAnnotations
table1 = self.mTable1
table2 = self.mTable2
column1 = self.mColumn1
column2 = self.mColumn2
column3 = self.mColumn3
if not slice or slice == "all":
withdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table1)s""" % locals() )
woutdata = self.getAll( """SELECT %(column1)s,%(column2)s,%(column3)s FROM %(track)s_%(table2)s""" % locals() )
withd = withdata[column1]+withdata[column2]+withdata[column3]
witho = woutdata[column1]+woutdata[column2]+woutdata[column3]
wd = odict( (("position",range(0,len(withd))),("density",withd)))
wo = odict( (("position",range(0,len(witho))),("density",witho)))
data = odict( (("with_overlap",wd), ("without_overlap",wo)) )
return data
|
|
887c0416b90d467a809377546a73280560af0ba9
|
casepro/cases/migrations/0040_case_user_assignee.py
|
casepro/cases/migrations/0040_case_user_assignee.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cases', '0039_populate_case_watchers'),
]
operations = [
migrations.AddField(
model_name='case',
name='user_assignee',
field=models.ForeignKey(related_name='cases', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, help_text='The (optional) user that this case is assigned to', null=True),
),
]
|
Add migration for user_assignee Case field
|
Add migration for user_assignee Case field
|
Python
|
bsd-3-clause
|
praekelt/casepro,praekelt/casepro,praekelt/casepro
|
Add migration for user_assignee Case field
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cases', '0039_populate_case_watchers'),
]
operations = [
migrations.AddField(
model_name='case',
name='user_assignee',
field=models.ForeignKey(related_name='cases', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, help_text='The (optional) user that this case is assigned to', null=True),
),
]
|
<commit_before><commit_msg>Add migration for user_assignee Case field<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cases', '0039_populate_case_watchers'),
]
operations = [
migrations.AddField(
model_name='case',
name='user_assignee',
field=models.ForeignKey(related_name='cases', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, help_text='The (optional) user that this case is assigned to', null=True),
),
]
|
Add migration for user_assignee Case field# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cases', '0039_populate_case_watchers'),
]
operations = [
migrations.AddField(
model_name='case',
name='user_assignee',
field=models.ForeignKey(related_name='cases', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, help_text='The (optional) user that this case is assigned to', null=True),
),
]
|
<commit_before><commit_msg>Add migration for user_assignee Case field<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('cases', '0039_populate_case_watchers'),
]
operations = [
migrations.AddField(
model_name='case',
name='user_assignee',
field=models.ForeignKey(related_name='cases', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, help_text='The (optional) user that this case is assigned to', null=True),
),
]
|
|
93b95bd4e902e614c4d7f67627fef8b771388152
|
analysis_ps_ring_simus.py
|
analysis_ps_ring_simus.py
|
import h5manager as hm
import tables
import matplotlib.pyplot as plt
import numpy as np
def main(dbfile):
# Get the simulations
db = tables.openFile(dbfile)
simus = hm.get_first_level_groups(db.root)
# Define some function to get specific result values
def get_strength(simu):
return hm.get_group_attr(simu, ('paramset', '_v_attrs', 'Common', 'inter_conn_strength', 0, 0))
def get_mps(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'MPS', 'whole'))
def get_sts(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'STS', 'whole'))
def get_fftmax(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'FFTMAX', 'mean'))
get_indexes = (get_strength, get_mps, get_sts, get_fftmax)
# Get simulation indexes for each simulation
res_indexes = np.ndarray((len(simus), len(get_indexes)))
for i_simu, simu in enumerate(simus):
for i_index, get_index in enumerate(get_indexes):
res_indexes[i_simu, i_index] = get_index(simu)
# Plot the res_indexes against interconnection strength
plt.figure()
plt.plot(res_indexes[:, 0], res_indexes[:, 1], '.', label="MPS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 2], '.', label="STS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 3], '.', label="FFTMAX (mean)")
plt.legend()
plt.show()
db.close()
if __name__ == '__main__':
from sys import argv
db = argv[1]
main(db)
|
Add a script to analyse ring topology simulations
|
Add a script to analyse ring topology simulations
|
Python
|
mit
|
neuro-lyon/multiglom-model,neuro-lyon/multiglom-model
|
Add a script to analyse ring topology simulations
|
import h5manager as hm
import tables
import matplotlib.pyplot as plt
import numpy as np
def main(dbfile):
# Get the simulations
db = tables.openFile(dbfile)
simus = hm.get_first_level_groups(db.root)
# Define some function to get specific result values
def get_strength(simu):
return hm.get_group_attr(simu, ('paramset', '_v_attrs', 'Common', 'inter_conn_strength', 0, 0))
def get_mps(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'MPS', 'whole'))
def get_sts(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'STS', 'whole'))
def get_fftmax(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'FFTMAX', 'mean'))
get_indexes = (get_strength, get_mps, get_sts, get_fftmax)
# Get simulation indexes for each simulation
res_indexes = np.ndarray((len(simus), len(get_indexes)))
for i_simu, simu in enumerate(simus):
for i_index, get_index in enumerate(get_indexes):
res_indexes[i_simu, i_index] = get_index(simu)
# Plot the res_indexes against interconnection strength
plt.figure()
plt.plot(res_indexes[:, 0], res_indexes[:, 1], '.', label="MPS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 2], '.', label="STS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 3], '.', label="FFTMAX (mean)")
plt.legend()
plt.show()
db.close()
if __name__ == '__main__':
from sys import argv
db = argv[1]
main(db)
|
<commit_before><commit_msg>Add a script to analyse ring topology simulations<commit_after>
|
import h5manager as hm
import tables
import matplotlib.pyplot as plt
import numpy as np
def main(dbfile):
# Get the simulations
db = tables.openFile(dbfile)
simus = hm.get_first_level_groups(db.root)
# Define some function to get specific result values
def get_strength(simu):
return hm.get_group_attr(simu, ('paramset', '_v_attrs', 'Common', 'inter_conn_strength', 0, 0))
def get_mps(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'MPS', 'whole'))
def get_sts(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'STS', 'whole'))
def get_fftmax(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'FFTMAX', 'mean'))
get_indexes = (get_strength, get_mps, get_sts, get_fftmax)
# Get simulation indexes for each simulation
res_indexes = np.ndarray((len(simus), len(get_indexes)))
for i_simu, simu in enumerate(simus):
for i_index, get_index in enumerate(get_indexes):
res_indexes[i_simu, i_index] = get_index(simu)
# Plot the res_indexes against interconnection strength
plt.figure()
plt.plot(res_indexes[:, 0], res_indexes[:, 1], '.', label="MPS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 2], '.', label="STS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 3], '.', label="FFTMAX (mean)")
plt.legend()
plt.show()
db.close()
if __name__ == '__main__':
from sys import argv
db = argv[1]
main(db)
|
Add a script to analyse ring topology simulationsimport h5manager as hm
import tables
import matplotlib.pyplot as plt
import numpy as np
def main(dbfile):
# Get the simulations
db = tables.openFile(dbfile)
simus = hm.get_first_level_groups(db.root)
# Define some function to get specific result values
def get_strength(simu):
return hm.get_group_attr(simu, ('paramset', '_v_attrs', 'Common', 'inter_conn_strength', 0, 0))
def get_mps(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'MPS', 'whole'))
def get_sts(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'STS', 'whole'))
def get_fftmax(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'FFTMAX', 'mean'))
get_indexes = (get_strength, get_mps, get_sts, get_fftmax)
# Get simulation indexes for each simulation
res_indexes = np.ndarray((len(simus), len(get_indexes)))
for i_simu, simu in enumerate(simus):
for i_index, get_index in enumerate(get_indexes):
res_indexes[i_simu, i_index] = get_index(simu)
# Plot the res_indexes against interconnection strength
plt.figure()
plt.plot(res_indexes[:, 0], res_indexes[:, 1], '.', label="MPS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 2], '.', label="STS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 3], '.', label="FFTMAX (mean)")
plt.legend()
plt.show()
db.close()
if __name__ == '__main__':
from sys import argv
db = argv[1]
main(db)
|
<commit_before><commit_msg>Add a script to analyse ring topology simulations<commit_after>import h5manager as hm
import tables
import matplotlib.pyplot as plt
import numpy as np
def main(dbfile):
# Get the simulations
db = tables.openFile(dbfile)
simus = hm.get_first_level_groups(db.root)
# Define some function to get specific result values
def get_strength(simu):
return hm.get_group_attr(simu, ('paramset', '_v_attrs', 'Common', 'inter_conn_strength', 0, 0))
def get_mps(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'MPS', 'whole'))
def get_sts(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'STS', 'whole'))
def get_fftmax(simu):
return hm.get_group_attr(simu, ('results', '_v_attrs', 'FFTMAX', 'mean'))
get_indexes = (get_strength, get_mps, get_sts, get_fftmax)
# Get simulation indexes for each simulation
res_indexes = np.ndarray((len(simus), len(get_indexes)))
for i_simu, simu in enumerate(simus):
for i_index, get_index in enumerate(get_indexes):
res_indexes[i_simu, i_index] = get_index(simu)
# Plot the res_indexes against interconnection strength
plt.figure()
plt.plot(res_indexes[:, 0], res_indexes[:, 1], '.', label="MPS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 2], '.', label="STS (whole)")
plt.plot(res_indexes[:, 0], res_indexes[:, 3], '.', label="FFTMAX (mean)")
plt.legend()
plt.show()
db.close()
if __name__ == '__main__':
from sys import argv
db = argv[1]
main(db)
|
|
5e8a7202e1eb7ecc0609d5b5fa851b3aa98cb989
|
ledgerblue/loadMCU.py
|
ledgerblue/loadMCU.py
|
"""
*******************************************************************************
* Ledger Blue
* (c) 2016 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .hexParser import IntelHexParser
from .hexLoader import HexLoader
from .comm import getDongle
import argparse
def auto_int(x):
return int(x, 0)
parser = argparse.ArgumentParser()
parser.add_argument("--targetId", help="Set the chip target ID", type=auto_int)
parser.add_argument("--fileName", help="Set the file name to load")
parser.add_argument("--bootAddr", help="Set the boot address", type=auto_int)
parser.add_argument("--apdu", help="Display APDU log", action='store_true')
args = parser.parse_args()
if args.targetId == None:
raise Exception("Missing targetId")
if args.fileName == None:
raise Exception("Missing fileName")
parser = IntelHexParser(args.fileName)
if args.bootAddr == None:
args.bootAddr = parser.getBootAddr()
dongle = getDongle(args.apdu)
#relative load
loader = HexLoader(dongle, 0xe0, False, None, False)
loader.validateTargetId(args.targetId)
hash = loader.load(0xFF, 0xF0, parser.getAreas(), args.bootAddr)
loader.run(parser.getAreas(), args.bootAddr)
|
Add standalone MCU load support
|
Add standalone MCU load support
|
Python
|
apache-2.0
|
LedgerHQ/blue-loader-python
|
Add standalone MCU load support
|
"""
*******************************************************************************
* Ledger Blue
* (c) 2016 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .hexParser import IntelHexParser
from .hexLoader import HexLoader
from .comm import getDongle
import argparse
def auto_int(x):
return int(x, 0)
parser = argparse.ArgumentParser()
parser.add_argument("--targetId", help="Set the chip target ID", type=auto_int)
parser.add_argument("--fileName", help="Set the file name to load")
parser.add_argument("--bootAddr", help="Set the boot address", type=auto_int)
parser.add_argument("--apdu", help="Display APDU log", action='store_true')
args = parser.parse_args()
if args.targetId == None:
raise Exception("Missing targetId")
if args.fileName == None:
raise Exception("Missing fileName")
parser = IntelHexParser(args.fileName)
if args.bootAddr == None:
args.bootAddr = parser.getBootAddr()
dongle = getDongle(args.apdu)
#relative load
loader = HexLoader(dongle, 0xe0, False, None, False)
loader.validateTargetId(args.targetId)
hash = loader.load(0xFF, 0xF0, parser.getAreas(), args.bootAddr)
loader.run(parser.getAreas(), args.bootAddr)
|
<commit_before><commit_msg>Add standalone MCU load support<commit_after>
|
"""
*******************************************************************************
* Ledger Blue
* (c) 2016 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .hexParser import IntelHexParser
from .hexLoader import HexLoader
from .comm import getDongle
import argparse
def auto_int(x):
return int(x, 0)
parser = argparse.ArgumentParser()
parser.add_argument("--targetId", help="Set the chip target ID", type=auto_int)
parser.add_argument("--fileName", help="Set the file name to load")
parser.add_argument("--bootAddr", help="Set the boot address", type=auto_int)
parser.add_argument("--apdu", help="Display APDU log", action='store_true')
args = parser.parse_args()
if args.targetId == None:
raise Exception("Missing targetId")
if args.fileName == None:
raise Exception("Missing fileName")
parser = IntelHexParser(args.fileName)
if args.bootAddr == None:
args.bootAddr = parser.getBootAddr()
dongle = getDongle(args.apdu)
#relative load
loader = HexLoader(dongle, 0xe0, False, None, False)
loader.validateTargetId(args.targetId)
hash = loader.load(0xFF, 0xF0, parser.getAreas(), args.bootAddr)
loader.run(parser.getAreas(), args.bootAddr)
|
Add standalone MCU load support"""
*******************************************************************************
* Ledger Blue
* (c) 2016 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .hexParser import IntelHexParser
from .hexLoader import HexLoader
from .comm import getDongle
import argparse
def auto_int(x):
return int(x, 0)
parser = argparse.ArgumentParser()
parser.add_argument("--targetId", help="Set the chip target ID", type=auto_int)
parser.add_argument("--fileName", help="Set the file name to load")
parser.add_argument("--bootAddr", help="Set the boot address", type=auto_int)
parser.add_argument("--apdu", help="Display APDU log", action='store_true')
args = parser.parse_args()
if args.targetId == None:
raise Exception("Missing targetId")
if args.fileName == None:
raise Exception("Missing fileName")
parser = IntelHexParser(args.fileName)
if args.bootAddr == None:
args.bootAddr = parser.getBootAddr()
dongle = getDongle(args.apdu)
#relative load
loader = HexLoader(dongle, 0xe0, False, None, False)
loader.validateTargetId(args.targetId)
hash = loader.load(0xFF, 0xF0, parser.getAreas(), args.bootAddr)
loader.run(parser.getAreas(), args.bootAddr)
|
<commit_before><commit_msg>Add standalone MCU load support<commit_after>"""
*******************************************************************************
* Ledger Blue
* (c) 2016 Ledger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
********************************************************************************
"""
from .hexParser import IntelHexParser
from .hexLoader import HexLoader
from .comm import getDongle
import argparse
def auto_int(x):
return int(x, 0)
parser = argparse.ArgumentParser()
parser.add_argument("--targetId", help="Set the chip target ID", type=auto_int)
parser.add_argument("--fileName", help="Set the file name to load")
parser.add_argument("--bootAddr", help="Set the boot address", type=auto_int)
parser.add_argument("--apdu", help="Display APDU log", action='store_true')
args = parser.parse_args()
if args.targetId == None:
raise Exception("Missing targetId")
if args.fileName == None:
raise Exception("Missing fileName")
parser = IntelHexParser(args.fileName)
if args.bootAddr == None:
args.bootAddr = parser.getBootAddr()
dongle = getDongle(args.apdu)
#relative load
loader = HexLoader(dongle, 0xe0, False, None, False)
loader.validateTargetId(args.targetId)
hash = loader.load(0xFF, 0xF0, parser.getAreas(), args.bootAddr)
loader.run(parser.getAreas(), args.bootAddr)
|
|
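A minimal sketch of driving the same MCU load flow programmatically; the ledgerblue import path, firmware file name and target ID below are assumptions for illustration, not values taken from the record above:
from ledgerblue.hexParser import IntelHexParser
from ledgerblue.hexLoader import HexLoader
from ledgerblue.comm import getDongle
hex_parser = IntelHexParser("mcu_firmware.hex")        # placeholder firmware file
dongle = getDongle(True)                               # True displays the APDU log
loader = HexLoader(dongle, 0xe0, False, None, False)   # relative load, as in the script
loader.validateTargetId(0x01000001)                    # placeholder target ID
loader.load(0xFF, 0xF0, hex_parser.getAreas(), hex_parser.getBootAddr())
loader.run(hex_parser.getAreas(), hex_parser.getBootAddr())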
83df070adca7b1548ed314ab4fb2782f44ef31bc
|
museum_site/migrations/0044_auto_20210118_1801.py
|
museum_site/migrations/0044_auto_20210118_1801.py
|
# Generated by Django 3.0.7 on 2021-01-18 18:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0043_auto_20201022_0242'),
]
operations = [
migrations.AddField(
model_name='article',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='file',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='article',
name='date',
field=models.DateField(default='1970-01-01'),
),
]
|
Add "spotlight" field to article/file models
|
Add "spotlight" field to article/file models
|
Python
|
mit
|
DrDos0016/z2,DrDos0016/z2,DrDos0016/z2
|
Add "spotlight" field to article/file models
|
# Generated by Django 3.0.7 on 2021-01-18 18:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0043_auto_20201022_0242'),
]
operations = [
migrations.AddField(
model_name='article',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='file',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='article',
name='date',
field=models.DateField(default='1970-01-01'),
),
]
|
<commit_before><commit_msg>Add "spotlight" field to article/file models<commit_after>
|
# Generated by Django 3.0.7 on 2021-01-18 18:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0043_auto_20201022_0242'),
]
operations = [
migrations.AddField(
model_name='article',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='file',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='article',
name='date',
field=models.DateField(default='1970-01-01'),
),
]
|
Add "spotlight" field to article/file models# Generated by Django 3.0.7 on 2021-01-18 18:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0043_auto_20201022_0242'),
]
operations = [
migrations.AddField(
model_name='article',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='file',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='article',
name='date',
field=models.DateField(default='1970-01-01'),
),
]
|
<commit_before><commit_msg>Add "spotlight" field to article/file models<commit_after># Generated by Django 3.0.7 on 2021-01-18 18:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0043_auto_20201022_0242'),
]
operations = [
migrations.AddField(
model_name='article',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='file',
name='spotlight',
field=models.BooleanField(default=True),
),
migrations.AlterField(
model_name='article',
name='date',
field=models.DateField(default='1970-01-01'),
),
]
|
|
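A usage sketch for the new flag, assuming the models are importable from museum_site.models (the import path is not shown in the record):
from museum_site.models import Article, File
# Both fields default to True per the migration above, so existing rows stay spotlighted.
spotlight_articles = Article.objects.filter(spotlight=True)
hidden_files = File.objects.filter(spotlight=False)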
80bedcde7264c5eaddcd1e23dcbf56e41e52c21f
|
daemon.py
|
daemon.py
|
#!/usr/bin/env python
import config
import events
cfg = config.Config()
cfg.load(["/etc/rally-ci/", "etc/rally-ci"])
handler = events.EventHandler(cfg)
handler.loop()
|
Add main script for running Rally CI
|
Add main script for running Rally CI
|
Python
|
apache-2.0
|
redixin/rally-ci,aarexer/rally-ci,redixin/rally-ci,aarexer/rally-ci,redixin/rally-ci
|
Add main script for running Rally CI
|
#!/usr/bin/env python
import config
import events
cfg = config.Config()
cfg.load(["/etc/rally-ci/", "etc/rally-ci"])
handler = events.EventHandler(cfg)
handler.loop()
|
<commit_before><commit_msg>Add main script for running Rally CI<commit_after>
|
#!/usr/bin/env python
import config
import events
cfg = config.Config()
cfg.load(["/etc/rally-ci/", "etc/rally-ci"])
handler = events.EventHandler(cfg)
handler.loop()
|
Add main script for running Rally CI#!/usr/bin/env python
import config
import events
cfg = config.Config()
cfg.load(["/etc/rally-ci/", "etc/rally-ci"])
handler = events.EventHandler(cfg)
handler.loop()
|
<commit_before><commit_msg>Add main script for running Rally CI<commit_after>#!/usr/bin/env python
import config
import events
cfg = config.Config()
cfg.load(["/etc/rally-ci/", "etc/rally-ci"])
handler = events.EventHandler(cfg)
handler.loop()
|
|
5b9a92a77fb2830d904e239f08c444e0c2cd2e6c
|
wheelcms_axle/tests/test_middleware.py
|
wheelcms_axle/tests/test_middleware.py
|
import mock
from wheelcms_axle.middleware import FixMessageMiddleware
class TestFixMessageMiddleware(object):
def test_oldstyle(self):
""" mock CookieStorage raising IndexError """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", side_effect=IndexError):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' not in request.COOKIES
def test_newstyle(self):
""" mock ordinary execution """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", return_value=None):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' in request.COOKIES
|
Add middleware, if not for keeping the coverage up :)
|
Add middleware, if not for keeping the coverage up :)
|
Python
|
bsd-2-clause
|
wheelcms/wheelcms_axle,wheelcms/wheelcms_axle,wheelcms/wheelcms_axle,wheelcms/wheelcms_axle
|
Add middleware, if not for keeping the coverage up :)
|
import mock
from wheelcms_axle.middleware import FixMessageMiddleware
class TestFixMessageMiddleware(object):
def test_oldstyle(self):
""" mock CookieStorage raising IndexError """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", side_effect=IndexError):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' not in request.COOKIES
def test_newstyle(self):
""" mock ordinary execution """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", return_value=None):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' in request.COOKIES
|
<commit_before><commit_msg>Add middleware, if not for keeping the coverage up :)<commit_after>
|
import mock
from wheelcms_axle.middleware import FixMessageMiddleware
class TestFixMessageMiddleware(object):
def test_oldstyle(self):
""" mock CookieStorage raising IndexError """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", side_effect=IndexError):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' not in request.COOKIES
def test_newstyle(self):
""" mock ordinary execution """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", return_value=None):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' in request.COOKIES
|
Add middleware, if not for keeping the coverage up :)import mock
from wheelcms_axle.middleware import FixMessageMiddleware
class TestFixMessageMiddleware(object):
def test_oldstyle(self):
""" mock CookieStorage raising IndexError """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", side_effect=IndexError):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' not in request.COOKIES
def test_newstyle(self):
""" mock ordinary execution """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", return_value=None):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' in request.COOKIES
|
<commit_before><commit_msg>Add middleware, if not for keeping the coverage up :)<commit_after>import mock
from wheelcms_axle.middleware import FixMessageMiddleware
class TestFixMessageMiddleware(object):
def test_oldstyle(self):
""" mock CookieStorage raising IndexError """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", side_effect=IndexError):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' not in request.COOKIES
def test_newstyle(self):
""" mock ordinary execution """
with mock.patch("django.contrib.messages.storage.cookie"
".CookieStorage._decode", return_value=None):
request = mock.Mock(COOKIES={'messages': 'dummy'})
FixMessageMiddleware().process_request(request)
assert 'messages' in request.COOKIES
|
|
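The middleware under test is not included in this record; a hypothetical reconstruction consistent with the two test cases above (not necessarily the project's actual implementation) could look like:
from django.contrib.messages.storage.cookie import CookieStorage
class FixMessageMiddleware(object):
    """Drop legacy 'messages' cookies that CookieStorage can no longer decode."""
    def process_request(self, request):
        if 'messages' in request.COOKIES:
            try:
                CookieStorage(request)._decode(request.COOKIES['messages'])
            except IndexError:
                del request.COOKIES['messages']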
c9ae5f97e59b323d4172f69022eca40e4039b6f0
|
corehq/apps/hqcase/bulk.py
|
corehq/apps/hqcase/bulk.py
|
from xml.etree import cElementTree as ElementTree
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from .utils import CASEBLOCK_CHUNKSIZE, submit_case_blocks
class CaseBulkDB:
def __init__(self, domain, user_id, device_id):
self.domain = domain
self.user_id = user_id
self.device_id = device_id
def __enter__(self):
self.to_save = []
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.commit()
def save(self, case_block):
self.to_save.append(case_block)
if len(self.to_save) >= CASEBLOCK_CHUNKSIZE:
self.commit()
def commit(self):
if self.to_save:
case_blocks = [
ElementTree.tostring(case_block.as_xml(), encoding='utf-8').decode('utf-8')
for case_block in self.to_save
]
submit_case_blocks(case_blocks, self.domain, device_id=self.device_id, user_id=self.user_id)
self.to_save = []
def update_cases(domain, update_fn, case_ids, user_id, device_id):
"""
Perform a large number of case updates in chunks
update_fn should be a function which accepts a case and returns a CaseBlock
if an update is to be performed, or None to skip the case.
"""
accessor = CaseAccessors(domain)
with CaseBulkDB(domain, user_id, device_id) as bulk_db:
for case in accessor.iter_cases(case_ids):
case_block = update_fn(case)
if case_block:
bulk_db.save(case_block)
|
Add iterative case update script from a while back
|
Add iterative case update script from a while back
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add iterative case update script from a while back
|
from xml.etree import cElementTree as ElementTree
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from .utils import CASEBLOCK_CHUNKSIZE, submit_case_blocks
class CaseBulkDB:
def __init__(self, domain, user_id, device_id):
self.domain = domain
self.user_id = user_id
self.device_id = device_id
def __enter__(self):
self.to_save = []
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.commit()
def save(self, case_block):
self.to_save.append(case_block)
if len(self.to_save) >= CASEBLOCK_CHUNKSIZE:
self.commit()
def commit(self):
if self.to_save:
case_blocks = [
ElementTree.tostring(case_block.as_xml(), encoding='utf-8').decode('utf-8')
for case_block in self.to_save
]
submit_case_blocks(case_blocks, self.domain, device_id=self.device_id, user_id=self.user_id)
self.to_save = []
def update_cases(domain, update_fn, case_ids, user_id, device_id):
"""
Perform a large number of case updates in chunks
update_fn should be a function which accepts a case and returns a CaseBlock
if an update is to be performed, or None to skip the case.
"""
accessor = CaseAccessors(domain)
with CaseBulkDB(domain, user_id, device_id) as bulk_db:
for case in accessor.iter_cases(case_ids):
case_block = update_fn(case)
if case_block:
bulk_db.save(case_block)
|
<commit_before><commit_msg>Add iterative case update script from a while back<commit_after>
|
from xml.etree import cElementTree as ElementTree
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from .utils import CASEBLOCK_CHUNKSIZE, submit_case_blocks
class CaseBulkDB:
def __init__(self, domain, user_id, device_id):
self.domain = domain
self.user_id = user_id
self.device_id = device_id
def __enter__(self):
self.to_save = []
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.commit()
def save(self, case_block):
self.to_save.append(case_block)
if len(self.to_save) >= CASEBLOCK_CHUNKSIZE:
self.commit()
def commit(self):
if self.to_save:
case_blocks = [
ElementTree.tostring(case_block.as_xml(), encoding='utf-8').decode('utf-8')
for case_block in self.to_save
]
submit_case_blocks(case_blocks, self.domain, device_id=self.device_id, user_id=self.user_id)
self.to_save = []
def update_cases(domain, update_fn, case_ids, user_id, device_id):
"""
Perform a large number of case updates in chunks
update_fn should be a function which accepts a case and returns a CaseBlock
if an update is to be performed, or None to skip the case.
"""
accessor = CaseAccessors(domain)
with CaseBulkDB(domain, user_id, device_id) as bulk_db:
for case in accessor.iter_cases(case_ids):
case_block = update_fn(case)
if case_block:
bulk_db.save(case_block)
|
Add iterative case update script from a while backfrom xml.etree import cElementTree as ElementTree
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from .utils import CASEBLOCK_CHUNKSIZE, submit_case_blocks
class CaseBulkDB:
def __init__(self, domain, user_id, device_id):
self.domain = domain
self.user_id = user_id
self.device_id = device_id
def __enter__(self):
self.to_save = []
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.commit()
def save(self, case_block):
self.to_save.append(case_block)
if len(self.to_save) >= CASEBLOCK_CHUNKSIZE:
self.commit()
def commit(self):
if self.to_save:
case_blocks = [
ElementTree.tostring(case_block.as_xml(), encoding='utf-8').decode('utf-8')
for case_block in self.to_save
]
submit_case_blocks(case_blocks, self.domain, device_id=self.device_id, user_id=self.user_id)
self.to_save = []
def update_cases(domain, update_fn, case_ids, user_id, device_id):
"""
Perform a large number of case updates in chunks
update_fn should be a function which accepts a case and returns a CaseBlock
if an update is to be performed, or None to skip the case.
"""
accessor = CaseAccessors(domain)
with CaseBulkDB(domain, user_id, device_id) as bulk_db:
for case in accessor.iter_cases(case_ids):
case_block = update_fn(case)
if case_block:
bulk_db.save(case_block)
|
<commit_before><commit_msg>Add iterative case update script from a while back<commit_after>from xml.etree import cElementTree as ElementTree
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from .utils import CASEBLOCK_CHUNKSIZE, submit_case_blocks
class CaseBulkDB:
def __init__(self, domain, user_id, device_id):
self.domain = domain
self.user_id = user_id
self.device_id = device_id
def __enter__(self):
self.to_save = []
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.commit()
def save(self, case_block):
self.to_save.append(case_block)
if len(self.to_save) >= CASEBLOCK_CHUNKSIZE:
self.commit()
def commit(self):
if self.to_save:
case_blocks = [
ElementTree.tostring(case_block.as_xml(), encoding='utf-8').decode('utf-8')
for case_block in self.to_save
]
submit_case_blocks(case_blocks, self.domain, device_id=self.device_id, user_id=self.user_id)
self.to_save = []
def update_cases(domain, update_fn, case_ids, user_id, device_id):
"""
Perform a large number of case updates in chunks
update_fn should be a function which accepts a case and returns a CaseBlock
if an update is to be performed, or None to skip the case.
"""
accessor = CaseAccessors(domain)
with CaseBulkDB(domain, user_id, device_id) as bulk_db:
for case in accessor.iter_cases(case_ids):
case_block = update_fn(case)
if case_block:
bulk_db.save(case_block)
|
|
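A usage sketch for update_cases; the CaseBlock import path, domain name, case property and values below are illustrative assumptions:
from casexml.apps.case.mock import CaseBlock
def archive_stale_case(case):
    # Return a CaseBlock to apply an update, or None to skip the case.
    if case.get_case_property('status') == 'stale':    # hypothetical case property
        return CaseBlock(case_id=case.case_id, update={'status': 'archived'})
    return None
update_cases('example-domain', archive_stale_case, ['case-id-1', 'case-id-2'],
             user_id='system', device_id='bulk_update_script')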
c4cd471b2830edbf1d2f24a4d9a79a0d669ae457
|
src/appengine_config.py
|
src/appengine_config.py
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""App Engine config."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
# Enabled copying data from the movieautocomplete app.
remoteapi_CUSTOM_ENVIRONMENT_AUTHENTICATION = (
'HTTP_X_APPENGINE_INBOUND_APPID', ['movieautocomplete'])
|
Add appengine config for data transfer.
|
Add appengine config for data transfer.
|
Python
|
mpl-2.0
|
adamjmcgrath/fridayfilmclub,adamjmcgrath/fridayfilmclub,adamjmcgrath/fridayfilmclub,adamjmcgrath/fridayfilmclub
|
Add appengine config for data transfer.
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""App Engine config."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
# Enabled copying data from the movieautocomplete app.
remoteapi_CUSTOM_ENVIRONMENT_AUTHENTICATION = (
'HTTP_X_APPENGINE_INBOUND_APPID', ['movieautocomplete'])
|
<commit_before><commit_msg>Add appengine config for data transfer.<commit_after>
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""App Engine config."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
# Enabled copying data from the movieautocomplete app.
remoteapi_CUSTOM_ENVIRONMENT_AUTHENTICATION = (
'HTTP_X_APPENGINE_INBOUND_APPID', ['movieautocomplete'])
|
Add appengine config for data transfer.#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""App Engine config."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
# Enabled copying data from the movieautocomplete app.
remoteapi_CUSTOM_ENVIRONMENT_AUTHENTICATION = (
'HTTP_X_APPENGINE_INBOUND_APPID', ['movieautocomplete'])
|
<commit_before><commit_msg>Add appengine config for data transfer.<commit_after>#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""App Engine config."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
# Enabled copying data from the movieautocomplete app.
remoteapi_CUSTOM_ENVIRONMENT_AUTHENTICATION = (
'HTTP_X_APPENGINE_INBOUND_APPID', ['movieautocomplete'])
|
|
8e05f4d6e0b8c3567776c1f3799e807877bd1fa2
|
dev/spaces.py
|
dev/spaces.py
|
from pathlib import Path
import json
import sys
cwd = Path(sys.argv[0]).parent
cmd_files = cwd.joinpath('../data/packages').glob('*_cmd.json')
all_cmds = {}
for f in cmd_files:
f_cmds = json.load(open(f, encoding='utf8'))
all_cmds.update(f_cmds)
cmds_with_spaces = {}
for key in all_cmds:
if key.find(' ') >= 0:
cmds_with_spaces[key] = all_cmds[key]
for entry in [(k, cmds_with_spaces[k]['package']) for k in cmds_with_spaces.keys()]:
print(f'{entry[1]}: {entry[0]}')
|
Test if some command keys have a space
|
Test if some command keys have a space
|
Python
|
mit
|
James-Yu/LaTeX-Workshop,James-Yu/LaTeX-Workshop,James-Yu/LaTeX-Workshop,James-Yu/LaTeX-Workshop,James-Yu/LaTeX-Workshop
|
Test if some command keys have a space
|
from pathlib import Path
import json
import sys
cwd = Path(sys.argv[0]).parent
cmd_files = cwd.joinpath('../data/packages').glob('*_cmd.json')
all_cmds = {}
for f in cmd_files:
f_cmds = json.load(open(f, encoding='utf8'))
all_cmds.update(f_cmds)
cmds_with_spaces = {}
for key in all_cmds:
if key.find(' ') >= 0:
cmds_with_spaces[key] = all_cmds[key]
for entry in [(k, cmds_with_spaces[k]['package']) for k in cmds_with_spaces.keys()]:
print(f'{entry[1]}: {entry[0]}')
|
<commit_before><commit_msg>Test if some command keys have a space<commit_after>
|
from pathlib import Path
import json
import sys
cwd = Path(sys.argv[0]).parent
cmd_files = cwd.joinpath('../data/packages').glob('*_cmd.json')
all_cmds = {}
for f in cmd_files:
f_cmds = json.load(open(f, encoding='utf8'))
all_cmds.update(f_cmds)
cmds_with_spaces = {}
for key in all_cmds:
if key.find(' ') >= 0:
cmds_with_spaces[key] = all_cmds[key]
for entry in [(k, cmds_with_spaces[k]['package']) for k in cmds_with_spaces.keys()]:
print(f'{entry[1]}: {entry[0]}')
|
Test if some command keys have a spacefrom pathlib import Path
import json
import sys
cwd = Path(sys.argv[0]).parent
cmd_files = cwd.joinpath('../data/packages').glob('*_cmd.json')
all_cmds = {}
for f in cmd_files:
f_cmds = json.load(open(f, encoding='utf8'))
all_cmds.update(f_cmds)
cmds_with_spaces = {}
for key in all_cmds:
if key.find(' ') >= 0:
cmds_with_spaces[key] = all_cmds[key]
for entry in [(k, cmds_with_spaces[k]['package']) for k in cmds_with_spaces.keys()]:
print(f'{entry[1]}: {entry[0]}')
|
<commit_before><commit_msg>Test if some command keys have a space<commit_after>from pathlib import Path
import json
import sys
cwd = Path(sys.argv[0]).parent
cmd_files = cwd.joinpath('../data/packages').glob('*_cmd.json')
all_cmds = {}
for f in cmd_files:
f_cmds = json.load(open(f, encoding='utf8'))
all_cmds.update(f_cmds)
cmds_with_spaces = {}
for key in all_cmds:
if key.find(' ') >= 0:
cmds_with_spaces[key] = all_cmds[key]
for entry in [(k, cmds_with_spaces[k]['package']) for k in cmds_with_spaces.keys()]:
print(f'{entry[1]}: {entry[0]}')
|
|
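For reference, the script only relies on each *_cmd.json entry carrying a 'package' field; an illustrative entry (the key and package name are made up) and the same space check in isolation:
example_cmds = {"command with space": {"package": "example-package"}}
for key, meta in example_cmds.items():
    if ' ' in key:
        print(f"{meta['package']}: {key}")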
b414be299064f4f278a9bbb68c9291d9d2e3bc77
|
hoomd/pytest/test_dcd.py
|
hoomd/pytest/test_dcd.py
|
import hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
sim.run(10)
|
Test a dcd writer can be attached to a simulation and run without errors
|
Test a dcd writer can be attached to a simulation and run without errors
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
Test a dcd writer can be attached to a simulation and run without errors
|
import hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
sim.run(10)
|
<commit_before><commit_msg>Test a dcd writer can be attached to a simulation and run without errors<commit_after>
|
import hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
sim.run(10)
|
Test a dcd writer can be attached to a simulation and run without errorsimport hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
sim.run(10)
|
<commit_before><commit_msg>Test a dcd writer can be attached to a simulation and run without errors<commit_after>import hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
sim.run(10)
|
|
9591e51fbbc610e0a19a0db760b4db8776f3ae6d
|
games/management/commands/migrate_to_proton.py
|
games/management/commands/migrate_to_proton.py
|
"""Migrates winesteam scripts to Proton"""
import logging
from django.core.management.base import BaseCommand
from common.util import load_yaml, dump_yaml
from games.models import Installer, Runner
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Migrate WineSteam games to Proton"
def is_simple_installer(self, installer):
script = load_yaml(installer.content)
sections = set(script.keys())
if sections in (
{"game"},
{"game", "wine"},
{"game", "winesteam"},
{"game", "winesteam", "system"}
):
return True
# Any Media Foundation workaround is likely very
# outdated at this point
if "Media Foundation" in installer.content:
return True
# People just submitting garbage
if "vcrun2017 dxvk" in installer.content:
return True
print(list(script.keys()))
print(installer.content)
return False
def has_steam_installer(self, installer):
return bool(Installer.objects.filter(
game=installer.game,
runner__slug="steam"
).count())
def get_winesteam_installers(self):
return Installer.objects.filter(runner__slug="winesteam")
def migrate_to_proton(self, installer):
script = load_yaml(installer.content)
appid = script["game"]["appid"]
installer.content = dump_yaml({"game": {"appid": appid}})
installer.runner = self.steam_runner
installer.version = "Proton"
installer.save()
def handle(self, *args, **options):
"""Change install scripts to specify wine prefix architecture"""
self.steam_runner = Runner.objects.get(slug="steam")
installers = self.get_winesteam_installers()
migrate_count = 0
delete_count = 0
for installer in installers:
if self.has_steam_installer(installer):
delete_count += 1
print("Deleting %s" % installer)
installer.delete()
continue
if not self.is_simple_installer(installer):
continue
migrate_count += 1
print("Migrating %s" % installer)
self.migrate_to_proton(installer)
print("%s/%s installers migrated" % (migrate_count, len(installers)))
print("%s/%s installers deleted" % (delete_count, len(installers)))
|
Add command to migrate winesteam installers to Proton
|
Add command to migrate winesteam installers to Proton
|
Python
|
agpl-3.0
|
lutris/website,lutris/website,lutris/website,lutris/website
|
Add command to migrate winesteam installers to Proton
|
"""Migrates winesteam scripts to Proton"""
import logging
from django.core.management.base import BaseCommand
from common.util import load_yaml, dump_yaml
from games.models import Installer, Runner
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Migrate WineSteam games to Proton"
def is_simple_installer(self, installer):
script = load_yaml(installer.content)
sections = set(script.keys())
if sections in (
{"game"},
{"game", "wine"},
{"game", "winesteam"},
{"game", "winesteam", "system"}
):
return True
# Any Media Foundation workaround is likely very
# outdated at this point
if "Media Foundation" in installer.content:
return True
# People just submitting garbage
if "vcrun2017 dxvk" in installer.content:
return True
print(list(script.keys()))
print(installer.content)
return False
def has_steam_installer(self, installer):
return bool(Installer.objects.filter(
game=installer.game,
runner__slug="steam"
).count())
def get_winesteam_installers(self):
return Installer.objects.filter(runner__slug="winesteam")
def migrate_to_proton(self, installer):
script = load_yaml(installer.content)
appid = script["game"]["appid"]
installer.content = dump_yaml({"game": {"appid": appid}})
installer.runner = self.steam_runner
installer.version = "Proton"
installer.save()
def handle(self, *args, **options):
"""Change install scripts to specify wine prefix architecture"""
self.steam_runner = Runner.objects.get(slug="steam")
installers = self.get_winesteam_installers()
migrate_count = 0
delete_count = 0
for installer in installers:
if self.has_steam_installer(installer):
delete_count += 1
print("Deleting %s" % installer)
installer.delete()
continue
if not self.is_simple_installer(installer):
continue
migrate_count += 1
print("Migrating %s" % installer)
self.migrate_to_proton(installer)
print("%s/%s installers migrated" % (migrate_count, len(installers)))
print("%s/%s installers deleted" % (delete_count, len(installers)))
|
<commit_before><commit_msg>Add command to migrate winesteam installers to Proton<commit_after>
|
"""Migrates winesteam scripts to Proton"""
import logging
from django.core.management.base import BaseCommand
from common.util import load_yaml, dump_yaml
from games.models import Installer, Runner
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Migrate WineSteam games to Proton"
def is_simple_installer(self, installer):
script = load_yaml(installer.content)
sections = set(script.keys())
if sections in (
{"game"},
{"game", "wine"},
{"game", "winesteam"},
{"game", "winesteam", "system"}
):
return True
# Any Media Foundation workaround is likely very
# outdated at this point
if "Media Foundation" in installer.content:
return True
# People just submitting garbage
if "vcrun2017 dxvk" in installer.content:
return True
print(list(script.keys()))
print(installer.content)
return False
def has_steam_installer(self, installer):
return bool(Installer.objects.filter(
game=installer.game,
runner__slug="steam"
).count())
def get_winesteam_installers(self):
return Installer.objects.filter(runner__slug="winesteam")
def migrate_to_proton(self, installer):
script = load_yaml(installer.content)
appid = script["game"]["appid"]
installer.content = dump_yaml({"game": {"appid": appid}})
installer.runner = self.steam_runner
installer.version = "Proton"
installer.save()
def handle(self, *args, **options):
"""Change install scripts to specify wine prefix architecture"""
self.steam_runner = Runner.objects.get(slug="steam")
installers = self.get_winesteam_installers()
migrate_count = 0
delete_count = 0
for installer in installers:
if self.has_steam_installer(installer):
delete_count += 1
print("Deleting %s" % installer)
installer.delete()
continue
if not self.is_simple_installer(installer):
continue
migrate_count += 1
print("Migrating %s" % installer)
self.migrate_to_proton(installer)
print("%s/%s installers migrated" % (migrate_count, len(installers)))
print("%s/%s installers deleted" % (delete_count, len(installers)))
|
Add command to migrate winesteam installers to Proton"""Migrates winesteam scripts to Proton"""
import logging
from django.core.management.base import BaseCommand
from common.util import load_yaml, dump_yaml
from games.models import Installer, Runner
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Migrate WineSteam games to Proton"
def is_simple_installer(self, installer):
script = load_yaml(installer.content)
sections = set(script.keys())
if sections in (
{"game"},
{"game", "wine"},
{"game", "winesteam"},
{"game", "winesteam", "system"}
):
return True
# Any Media Foundation workaround is likely very
# outdated at this point
if "Media Foundation" in installer.content:
return True
# People just submitting garbage
if "vcrun2017 dxvk" in installer.content:
return True
print(list(script.keys()))
print(installer.content)
return False
def has_steam_installer(self, installer):
return bool(Installer.objects.filter(
game=installer.game,
runner__slug="steam"
).count())
def get_winesteam_installers(self):
return Installer.objects.filter(runner__slug="winesteam")
def migrate_to_proton(self, installer):
script = load_yaml(installer.content)
appid = script["game"]["appid"]
installer.content = dump_yaml({"game": {"appid": appid}})
installer.runner = self.steam_runner
installer.version = "Proton"
installer.save()
def handle(self, *args, **options):
"""Change install scripts to specify wine prefix architecture"""
self.steam_runner = Runner.objects.get(slug="steam")
installers = self.get_winesteam_installers()
migrate_count = 0
delete_count = 0
for installer in installers:
if self.has_steam_installer(installer):
delete_count += 1
print("Deleting %s" % installer)
installer.delete()
continue
if not self.is_simple_installer(installer):
continue
migrate_count += 1
print("Migrating %s" % installer)
self.migrate_to_proton(installer)
print("%s/%s installers migrated" % (migrate_count, len(installers)))
print("%s/%s installers deleted" % (delete_count, len(installers)))
|
<commit_before><commit_msg>Add command to migrate winesteam installers to Proton<commit_after>"""Migrates winesteam scripts to Proton"""
import logging
from django.core.management.base import BaseCommand
from common.util import load_yaml, dump_yaml
from games.models import Installer, Runner
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Migrate WineSteam games to Proton"
def is_simple_installer(self, installer):
script = load_yaml(installer.content)
sections = set(script.keys())
if sections in (
{"game"},
{"game", "wine"},
{"game", "winesteam"},
{"game", "winesteam", "system"}
):
return True
# Any Media Foundation workaround is likely very
# outdated at this point
if "Media Foundation" in installer.content:
return True
# People just submitting garbage
if "vcrun2017 dxvk" in installer.content:
return True
print(list(script.keys()))
print(installer.content)
return False
def has_steam_installer(self, installer):
return bool(Installer.objects.filter(
game=installer.game,
runner__slug="steam"
).count())
def get_winesteam_installers(self):
return Installer.objects.filter(runner__slug="winesteam")
def migrate_to_proton(self, installer):
script = load_yaml(installer.content)
appid = script["game"]["appid"]
installer.content = dump_yaml({"game": {"appid": appid}})
installer.runner = self.steam_runner
installer.version = "Proton"
installer.save()
def handle(self, *args, **options):
"""Change install scripts to specify wine prefix architecture"""
self.steam_runner = Runner.objects.get(slug="steam")
installers = self.get_winesteam_installers()
migrate_count = 0
delete_count = 0
for installer in installers:
if self.has_steam_installer(installer):
delete_count += 1
print("Deleting %s" % installer)
installer.delete()
continue
if not self.is_simple_installer(installer):
continue
migrate_count += 1
print("Migrating %s" % installer)
self.migrate_to_proton(installer)
print("%s/%s installers migrated" % (migrate_count, len(installers)))
print("%s/%s installers deleted" % (delete_count, len(installers)))
|
|
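The command is intended as a one-off maintenance task; as an illustration, it can also be run programmatically through Django's standard call_command helper:
from django.core.management import call_command
call_command('migrate_to_proton')   # prints per-installer progress plus the migrated/deleted totals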
89a985433218968024fe766c26d68bc3e89b501f
|
kboard/functional_test/test_post_validation.py
|
kboard/functional_test/test_post_validation.py
|
from .base import FunctionalTest
class PostValidationTest(FunctionalTest):
def test_cannot_add_empty_title(self):
self.browser.get(self.live_server_url)
self.move_to_default_board()
        # Jihoon clicks the write button to create a new post.
self.click_create_post_button()
        # He accidentally tries to submit a post with an empty title.
        # He fills in the content,
contentbox = self.get_contentbox()
contentbox.send_keys('Content of This Post')
self.browser.switch_to.default_content()
        # but clicks the 'Submit' button while the title input box is still empty.
self.click_submit_button()
        # The page reloads and shows an error message saying an empty item cannot be submitted.
error = self.browser.find_element_by_css_selector('.has-error')
self.assertEqual(error.text, "제목을 입력하세요")
        # Retrying with a title entered works normally.
titlebox = self.browser.find_element_by_id('id_post_title')
titlebox.send_keys('Title of This Post')
self.click_submit_button()
self.check_for_row_in_list_table('id_post_list_table', 'Title of This Post')
|
Add post title validation functional test
|
Add post title validation functional test
|
Python
|
mit
|
darjeeling/k-board,kboard/kboard,guswnsxodlf/k-board,hyesun03/k-board,hyesun03/k-board,guswnsxodlf/k-board,cjh5414/kboard,kboard/kboard,kboard/kboard,hyesun03/k-board,guswnsxodlf/k-board,cjh5414/kboard,cjh5414/kboard
|
Add post title validation functional test
|
from .base import FunctionalTest
class PostValidationTest(FunctionalTest):
def test_cannot_add_empty_title(self):
self.browser.get(self.live_server_url)
self.move_to_default_board()
        # Jihoon clicks the write button to create a new post.
self.click_create_post_button()
        # He accidentally tries to submit a post with an empty title.
        # He fills in the content,
contentbox = self.get_contentbox()
contentbox.send_keys('Content of This Post')
self.browser.switch_to.default_content()
        # but clicks the 'Submit' button while the title input box is still empty.
self.click_submit_button()
        # The page reloads and shows an error message saying an empty item cannot be submitted.
error = self.browser.find_element_by_css_selector('.has-error')
self.assertEqual(error.text, "제목을 입력하세요")
        # Retrying with a title entered works normally.
titlebox = self.browser.find_element_by_id('id_post_title')
titlebox.send_keys('Title of This Post')
self.click_submit_button()
self.check_for_row_in_list_table('id_post_list_table', 'Title of This Post')
|
<commit_before><commit_msg>Add post title validation functional test<commit_after>
|
from .base import FunctionalTest
class PostValidationTest(FunctionalTest):
def test_cannot_add_empty_title(self):
self.browser.get(self.live_server_url)
self.move_to_default_board()
        # Jihoon clicks the write button to create a new post.
self.click_create_post_button()
        # He accidentally tries to submit a post with an empty title.
        # He fills in the content,
contentbox = self.get_contentbox()
contentbox.send_keys('Content of This Post')
self.browser.switch_to.default_content()
        # but clicks the 'Submit' button while the title input box is still empty.
self.click_submit_button()
        # The page reloads and shows an error message saying an empty item cannot be submitted.
error = self.browser.find_element_by_css_selector('.has-error')
self.assertEqual(error.text, "제목을 입력하세요")
        # Retrying with a title entered works normally.
titlebox = self.browser.find_element_by_id('id_post_title')
titlebox.send_keys('Title of This Post')
self.click_submit_button()
self.check_for_row_in_list_table('id_post_list_table', 'Title of This Post')
|
Add post title validation functional testfrom .base import FunctionalTest
class PostValidationTest(FunctionalTest):
def test_cannot_add_empty_title(self):
self.browser.get(self.live_server_url)
self.move_to_default_board()
        # Jihoon clicks the write button to create a new post.
self.click_create_post_button()
        # He accidentally tries to submit a post with an empty title.
        # He fills in the content,
contentbox = self.get_contentbox()
contentbox.send_keys('Content of This Post')
self.browser.switch_to.default_content()
        # but clicks the 'Submit' button while the title input box is still empty.
self.click_submit_button()
        # The page reloads and shows an error message saying an empty item cannot be submitted.
error = self.browser.find_element_by_css_selector('.has-error')
self.assertEqual(error.text, "제목을 입력하세요")
        # Retrying with a title entered works normally.
titlebox = self.browser.find_element_by_id('id_post_title')
titlebox.send_keys('Title of This Post')
self.click_submit_button()
self.check_for_row_in_list_table('id_post_list_table', 'Title of This Post')
|
<commit_before><commit_msg>Add post title validation functional test<commit_after>from .base import FunctionalTest
class PostValidationTest(FunctionalTest):
def test_cannot_add_empty_title(self):
self.browser.get(self.live_server_url)
self.move_to_default_board()
        # Jihoon clicks the write button to create a new post.
self.click_create_post_button()
        # He accidentally tries to submit a post with an empty title.
        # He fills in the content,
contentbox = self.get_contentbox()
contentbox.send_keys('Content of This Post')
self.browser.switch_to.default_content()
        # but clicks the 'Submit' button while the title input box is still empty.
self.click_submit_button()
        # The page reloads and shows an error message saying an empty item cannot be submitted.
error = self.browser.find_element_by_css_selector('.has-error')
self.assertEqual(error.text, "제목을 입력하세요")
        # Retrying with a title entered works normally.
titlebox = self.browser.find_element_by_id('id_post_title')
titlebox.send_keys('Title of This Post')
self.click_submit_button()
self.check_for_row_in_list_table('id_post_list_table', 'Title of This Post')
|
|
cd0c618af63fed1cd7006bb67da46eac0ddbb1c7
|
scripts/find_logins_for_ipaddress.py
|
scripts/find_logins_for_ipaddress.py
|
#!/usr/bin/env python
"""Find user login events for an IP address.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Dict, List
import click
from byceps.services.user.models.event import UserEvent
from byceps.services.user import service as user_service
from byceps.services.user.transfer.models import User
from byceps.typing import PartyID, UserID
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('ip_address')
def execute(ip_address: str):
events = find_events(ip_address)
users_by_id = get_users_by_id(events)
for event in events:
user = users_by_id[event.user_id]
click.echo(f'{event.occurred_at}\t{ip_address}\t{user.screen_name}')
def find_events(ip_address: str) -> List[UserEvent]:
return UserEvent.query \
.filter_by(event_type='user-logged-in') \
.filter(UserEvent.data['ip_address'].astext == ip_address) \
.order_by(UserEvent.occurred_at) \
.all()
def get_users_by_id(events: List[UserEvent]) -> Dict[UserID, User]:
user_ids = {event.user_id for event in events}
users = user_service.find_users(user_ids)
return {user.id: user for user in users}
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
Add script to find user login events for IP address
|
Add script to find user login events for IP address
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps
|
Add script to find user login events for IP address
|
#!/usr/bin/env python
"""Find user login events for an IP address.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Dict, List
import click
from byceps.services.user.models.event import UserEvent
from byceps.services.user import service as user_service
from byceps.services.user.transfer.models import User
from byceps.typing import PartyID, UserID
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('ip_address')
def execute(ip_address: str):
events = find_events(ip_address)
users_by_id = get_users_by_id(events)
for event in events:
user = users_by_id[event.user_id]
click.echo(f'{event.occurred_at}\t{ip_address}\t{user.screen_name}')
def find_events(ip_address: str) -> List[UserEvent]:
return UserEvent.query \
.filter_by(event_type='user-logged-in') \
.filter(UserEvent.data['ip_address'].astext == ip_address) \
.order_by(UserEvent.occurred_at) \
.all()
def get_users_by_id(events: List[UserEvent]) -> Dict[UserID, User]:
user_ids = {event.user_id for event in events}
users = user_service.find_users(user_ids)
return {user.id: user for user in users}
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
<commit_before><commit_msg>Add script to find user login events for IP address<commit_after>
|
#!/usr/bin/env python
"""Find user login events for an IP address.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Dict, List
import click
from byceps.services.user.models.event import UserEvent
from byceps.services.user import service as user_service
from byceps.services.user.transfer.models import User
from byceps.typing import PartyID, UserID
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('ip_address')
def execute(ip_address: str):
events = find_events(ip_address)
users_by_id = get_users_by_id(events)
for event in events:
user = users_by_id[event.user_id]
click.echo(f'{event.occurred_at}\t{ip_address}\t{user.screen_name}')
def find_events(ip_address: str) -> List[UserEvent]:
return UserEvent.query \
.filter_by(event_type='user-logged-in') \
.filter(UserEvent.data['ip_address'].astext == ip_address) \
.order_by(UserEvent.occurred_at) \
.all()
def get_users_by_id(events: List[UserEvent]) -> Dict[UserID, User]:
user_ids = {event.user_id for event in events}
users = user_service.find_users(user_ids)
return {user.id: user for user in users}
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
Add script to find user login events for IP address#!/usr/bin/env python
"""Find user login events for an IP address.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Dict, List
import click
from byceps.services.user.models.event import UserEvent
from byceps.services.user import service as user_service
from byceps.services.user.transfer.models import User
from byceps.typing import PartyID, UserID
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('ip_address')
def execute(ip_address: str):
events = find_events(ip_address)
users_by_id = get_users_by_id(events)
for event in events:
user = users_by_id[event.user_id]
click.echo(f'{event.occurred_at}\t{ip_address}\t{user.screen_name}')
def find_events(ip_address: str) -> List[UserEvent]:
return UserEvent.query \
.filter_by(event_type='user-logged-in') \
.filter(UserEvent.data['ip_address'].astext == ip_address) \
.order_by(UserEvent.occurred_at) \
.all()
def get_users_by_id(events: List[UserEvent]) -> Dict[UserID, User]:
user_ids = {event.user_id for event in events}
users = user_service.find_users(user_ids)
return {user.id: user for user in users}
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
<commit_before><commit_msg>Add script to find user login events for IP address<commit_after>#!/usr/bin/env python
"""Find user login events for an IP address.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Dict, List
import click
from byceps.services.user.models.event import UserEvent
from byceps.services.user import service as user_service
from byceps.services.user.transfer.models import User
from byceps.typing import PartyID, UserID
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('ip_address')
def execute(ip_address: str):
events = find_events(ip_address)
users_by_id = get_users_by_id(events)
for event in events:
user = users_by_id[event.user_id]
click.echo(f'{event.occurred_at}\t{ip_address}\t{user.screen_name}')
def find_events(ip_address: str) -> List[UserEvent]:
return UserEvent.query \
.filter_by(event_type='user-logged-in') \
.filter(UserEvent.data['ip_address'].astext == ip_address) \
.order_by(UserEvent.occurred_at) \
.all()
def get_users_by_id(events: List[UserEvent]) -> Dict[UserID, User]:
user_ids = {event.user_id for event in events}
users = user_service.find_users(user_ids)
return {user.id: user for user in users}
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
|
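As an illustration, the click command can be exercised in isolation with click's test runner, assuming the execute command is importable and an application context and database are already set up; the IP address below is a documentation placeholder:
from click.testing import CliRunner
result = CliRunner().invoke(execute, ['203.0.113.42'])
print(result.output)   # one line per matching login event: timestamp, IP address, screen name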
ded313eaf070cf46b07ea0c67b16d00be804bbda
|
scripts/make_lfd_settings_package.py
|
scripts/make_lfd_settings_package.py
|
import argparse
import os
import lfd
def make_settings_tree(src, dst):
names = os.listdir(src)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
make_settings_tree(srcname, dstname)
elif name == 'settings.py':
if not os.path.isdir(dst):
os.makedirs(dst)
open(dstname, 'a')
open(os.path.join(dst, '__init__.py'), 'a')
def make_lfd_settings_package(lfd_settings_name):
""" Makes the lfd_settings package.
Makes the lfd_settings package with settings.py files with the same
subpackage and module structure as the lfd package. Only makes subpackages
and modules for which the corresponding one in the lfd package has a
settings.py file.
"""
lfd_name = os.path.dirname(lfd.__file__)
make_settings_tree(lfd_name, lfd_settings_name)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('lfd_settings_name', type=str, help="Destination name for the lfd_settings package.")
args = parser.parse_args()
make_lfd_settings_package(args.lfd_settings_name)
if __name__ == '__main__':
main()
|
Add script to make the lfd_settings package.
|
Add script to make the lfd_settings package.
|
Python
|
bsd-2-clause
|
wjchen84/lfd,rll/lfd,wjchen84/lfd,wjchen84/lfd,rll/lfd,rll/lfd
|
Add script to make the lfd_settings package.
|
import argparse
import os
import lfd
def make_settings_tree(src, dst):
names = os.listdir(src)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
make_settings_tree(srcname, dstname)
elif name == 'settings.py':
if not os.path.isdir(dst):
os.makedirs(dst)
open(dstname, 'a')
open(os.path.join(dst, '__init__.py'), 'a')
def make_lfd_settings_package(lfd_settings_name):
""" Makes the lfd_settings package.
Makes the lfd_settings package with settings.py files with the same
subpackage and module structure as the lfd package. Only makes subpackages
and modules for which the corresponding one in the lfd package has a
settings.py file.
"""
lfd_name = os.path.dirname(lfd.__file__)
make_settings_tree(lfd_name, lfd_settings_name)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('lfd_settings_name', type=str, help="Destination name for the lfd_settings package.")
args = parser.parse_args()
make_lfd_settings_package(args.lfd_settings_name)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to make the lfd_settings package.<commit_after>
|
import argparse
import os
import lfd
def make_settings_tree(src, dst):
names = os.listdir(src)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
make_settings_tree(srcname, dstname)
elif name == 'settings.py':
if not os.path.isdir(dst):
os.makedirs(dst)
open(dstname, 'a')
open(os.path.join(dst, '__init__.py'), 'a')
def make_lfd_settings_package(lfd_settings_name):
""" Makes the lfd_settings package.
Makes the lfd_settings package with settings.py files with the same
subpackage and module structure as the lfd package. Only makes subpackages
and modules for which the corresponding one in the lfd package has a
settings.py file.
"""
lfd_name = os.path.dirname(lfd.__file__)
make_settings_tree(lfd_name, lfd_settings_name)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('lfd_settings_name', type=str, help="Destination name for the lfd_settings package.")
args = parser.parse_args()
make_lfd_settings_package(args.lfd_settings_name)
if __name__ == '__main__':
main()
|
Add script to make the lfd_settings package.import argparse
import os
import lfd
def make_settings_tree(src, dst):
names = os.listdir(src)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
make_settings_tree(srcname, dstname)
elif name == 'settings.py':
if not os.path.isdir(dst):
os.makedirs(dst)
open(dstname, 'a')
open(os.path.join(dst, '__init__.py'), 'a')
def make_lfd_settings_package(lfd_settings_name):
""" Makes the lfd_settings package.
Makes the lfd_settings package with settings.py files with the same
subpackage and module structure as the lfd package. Only makes subpackages
and modules for which the corresponding one in the lfd package has a
settings.py file.
"""
lfd_name = os.path.dirname(lfd.__file__)
make_settings_tree(lfd_name, lfd_settings_name)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('lfd_settings_name', type=str, help="Destination name for the lfd_settings package.")
args = parser.parse_args()
make_lfd_settings_package(args.lfd_settings_name)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to make the lfd_settings package.<commit_after>import argparse
import os
import lfd
def make_settings_tree(src, dst):
names = os.listdir(src)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
make_settings_tree(srcname, dstname)
elif name == 'settings.py':
if not os.path.isdir(dst):
os.makedirs(dst)
open(dstname, 'a')
open(os.path.join(dst, '__init__.py'), 'a')
def make_lfd_settings_package(lfd_settings_name):
""" Makes the lfd_settings package.
Makes the lfd_settings package with settings.py files with the same
subpackage and module structure as the lfd package. Only makes subpackages
and modules for which the corresponding one in the lfd package has a
settings.py file.
"""
lfd_name = os.path.dirname(lfd.__file__)
make_settings_tree(lfd_name, lfd_settings_name)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('lfd_settings_name', type=str, help="Destination name for the lfd_settings package.")
args = parser.parse_args()
make_lfd_settings_package(args.lfd_settings_name)
if __name__ == '__main__':
main()
|
|
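An illustrative call, assuming the source tree contains lfd/environment/settings.py (the subpackage name is hypothetical):
make_settings_tree('/path/to/lfd', '/path/to/lfd_settings')
# Mirrors only directories that contain a settings.py, creating empty files:
#   /path/to/lfd_settings/environment/settings.py
#   /path/to/lfd_settings/environment/__init__.py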
15e7de59af35c1c45a4a2be4658729a8b0eb8082
|
experimental/directshow.py
|
experimental/directshow.py
|
#!/usr/bin/python
# $Id:$
# Play an audio file with DirectShow. Tested ok with MP3, WMA, MID, WAV, AU.
# Caveats:
# - Requires a filename (not from memory or stream yet). Looks like we need
# to manually implement a filter which provides an output IPin. Lot of
# work.
# - Theoretically can traverse the graph to get the output filter, which by
# default is supposed to implement IDirectSound3DBuffer, for positioned
# sounds. Untested.
# - Requires comtypes. Can work around this in future by implementing the
# small subset of comtypes ourselves (or including a snapshot of comtypes in
# pyglet).
import ctypes
from comtypes import client
import sys
import time
filename = sys.argv[1]
qedit = client.GetModule('qedit.dll') # DexterLib
quartz = client.GetModule('quartz.dll') #
CLSID_FilterGraph = '{e436ebb3-524f-11ce-9f53-0020af0ba770}'
filter_graph = client.CreateObject(CLSID_FilterGraph,
interface=qedit.IFilterGraph)
filter_builder = filter_graph.QueryInterface(qedit.IGraphBuilder)
filter_builder.RenderFile(filename, None)
media_control = filter_graph.QueryInterface(quartz.IMediaControl)
media_control.Run()
try:
# Look at IMediaEvent interface for EOS notification
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
# Need these because finalisers don't have enough context to clean up after
# themselves when the script exits.
del media_control
del filter_builder
del filter_graph
|
Move win32 audio experiment to trunk.
|
Move win32 audio experiment to trunk.
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40736
|
Python
|
bsd-3-clause
|
infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore,infowantstobeseen/pyglet-darwincore
|
Move win32 audio experiment to trunk.
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40736
|
#!/usr/bin/python
# $Id:$
# Play an audio file with DirectShow. Tested ok with MP3, WMA, MID, WAV, AU.
# Caveats:
# - Requires a filename (not from memory or stream yet). Looks like we need
# to manually implement a filter which provides an output IPin. Lot of
# work.
# - Theoretically can traverse the graph to get the output filter, which by
# default is supposed to implement IDirectSound3DBuffer, for positioned
# sounds. Untested.
# - Requires comtypes. Can work around this in future by implementing the
# small subset of comtypes ourselves (or including a snapshot of comtypes in
# pyglet).
import ctypes
from comtypes import client
import sys
import time
filename = sys.argv[1]
qedit = client.GetModule('qedit.dll') # DexterLib
quartz = client.GetModule('quartz.dll') #
CLSID_FilterGraph = '{e436ebb3-524f-11ce-9f53-0020af0ba770}'
filter_graph = client.CreateObject(CLSID_FilterGraph,
interface=qedit.IFilterGraph)
filter_builder = filter_graph.QueryInterface(qedit.IGraphBuilder)
filter_builder.RenderFile(filename, None)
media_control = filter_graph.QueryInterface(quartz.IMediaControl)
media_control.Run()
try:
# Look at IMediaEvent interface for EOS notification
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
# Need these because finalisers don't have enough context to clean up after
# themselves when the script exits.
del media_control
del filter_builder
del filter_graph
|
<commit_before><commit_msg>Move win32 audio experiment to trunk.
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40736<commit_after>
|
#!/usr/bin/python
# $Id:$
# Play an audio file with DirectShow. Tested ok with MP3, WMA, MID, WAV, AU.
# Caveats:
# - Requires a filename (not from memory or stream yet). Looks like we need
# to manually implement a filter which provides an output IPin. Lot of
# work.
# - Theoretically can traverse the graph to get the output filter, which by
# default is supposed to implement IDirectSound3DBuffer, for positioned
# sounds. Untested.
# - Requires comtypes. Can work around this in future by implementing the
# small subset of comtypes ourselves (or including a snapshot of comtypes in
# pyglet).
import ctypes
from comtypes import client
import sys
import time
filename = sys.argv[1]
qedit = client.GetModule('qedit.dll') # DexterLib
quartz = client.GetModule('quartz.dll') #
CLSID_FilterGraph = '{e436ebb3-524f-11ce-9f53-0020af0ba770}'
filter_graph = client.CreateObject(CLSID_FilterGraph,
interface=qedit.IFilterGraph)
filter_builder = filter_graph.QueryInterface(qedit.IGraphBuilder)
filter_builder.RenderFile(filename, None)
media_control = filter_graph.QueryInterface(quartz.IMediaControl)
media_control.Run()
try:
# Look at IMediaEvent interface for EOS notification
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
# Need these because finalisers don't have enough context to clean up after
# themselves when the script exits.
del media_control
del filter_builder
del filter_graph
|
Move win32 audio experiment to trunk.
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40736#!/usr/bin/python
# $Id:$
# Play an audio file with DirectShow. Tested ok with MP3, WMA, MID, WAV, AU.
# Caveats:
# - Requires a filename (not from memory or stream yet). Looks like we need
# to manually implement a filter which provides an output IPin. Lot of
# work.
# - Theoretically can traverse the graph to get the output filter, which by
# default is supposed to implement IDirectSound3DBuffer, for positioned
# sounds. Untested.
# - Requires comtypes. Can work around this in future by implementing the
# small subset of comtypes ourselves (or including a snapshot of comtypes in
# pyglet).
import ctypes
from comtypes import client
import sys
import time
filename = sys.argv[1]
qedit = client.GetModule('qedit.dll') # DexterLib
quartz = client.GetModule('quartz.dll') #
CLSID_FilterGraph = '{e436ebb3-524f-11ce-9f53-0020af0ba770}'
filter_graph = client.CreateObject(CLSID_FilterGraph,
interface=qedit.IFilterGraph)
filter_builder = filter_graph.QueryInterface(qedit.IGraphBuilder)
filter_builder.RenderFile(filename, None)
media_control = filter_graph.QueryInterface(quartz.IMediaControl)
media_control.Run()
try:
# Look at IMediaEvent interface for EOS notification
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
# Need these because finalisers don't have enough context to clean up after
# themselves when the script exits.
del media_control
del filter_builder
del filter_graph
|
<commit_before><commit_msg>Move win32 audio experiment to trunk.
--HG--
extra : convert_revision : svn%3A14d46d22-621c-0410-bb3d-6f67920f7d95/trunk%40736<commit_after>#!/usr/bin/python
# $Id:$
# Play an audio file with DirectShow. Tested ok with MP3, WMA, MID, WAV, AU.
# Caveats:
# - Requires a filename (not from memory or stream yet). Looks like we need
# to manually implement a filter which provides an output IPin. Lot of
# work.
# - Theoretically can traverse the graph to get the output filter, which by
# default is supposed to implement IDirectSound3DBuffer, for positioned
# sounds. Untested.
# - Requires comtypes. Can work around this in future by implementing the
# small subset of comtypes ourselves (or including a snapshot of comtypes in
# pyglet).
import ctypes
from comtypes import client
import sys
import time
filename = sys.argv[1]
qedit = client.GetModule('qedit.dll') # DexterLib
quartz = client.GetModule('quartz.dll') #
CLSID_FilterGraph = '{e436ebb3-524f-11ce-9f53-0020af0ba770}'
filter_graph = client.CreateObject(CLSID_FilterGraph,
interface=qedit.IFilterGraph)
filter_builder = filter_graph.QueryInterface(qedit.IGraphBuilder)
filter_builder.RenderFile(filename, None)
media_control = filter_graph.QueryInterface(quartz.IMediaControl)
media_control.Run()
try:
# Look at IMediaEvent interface for EOS notification
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
# Need these because finalisers don't have enough context to clean up after
# themselves when the script exits.
del media_control
del filter_builder
del filter_graph
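The comment about IMediaEvent above can be made concrete; this is a hedged sketch, not part of the original experiment. It assumes the comtypes-generated quartz module exposes IMediaEvent and that WaitForCompletion(msTimeout) returns the event code (EC_COMPLETE is 0x01 in DirectShow); it would replace the sleep loop.
# Sketch only: block until playback finishes instead of sleeping forever.
EC_COMPLETE = 0x01  # DirectShow event code for "playback completed" (assumed constant)
media_event = filter_graph.QueryInterface(quartz.IMediaEvent)  # assumes IMediaEvent is exposed
code = media_event.WaitForCompletion(-1)  # -1 ~ INFINITE; comtypes returns the [out] event code
assert code == EC_COMPLETE
del media_event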
|
|
401d7c80c3ea69cbb41f159d0c709fb08668c7f9
|
test/standalonewavetrigger.py
|
test/standalonewavetrigger.py
|
#!/usr/bin/env python
# Standard library imports
import argparse
import collections
import functools
import logging
import os
import json
import time
# Additional library imports
import requests
# Named logger for this module
_logger = logging.getLogger('wavetrigger')
# Parse the command line arguments
_parser = argparse.ArgumentParser('')
_parser.add_argument('-t', '--triggers', default='triggers', help='Folder containing trigger files')
_parser.add_argument('-r', '--rate', default=4.0, help='Poll rate in polls per second')
_parser.add_argument('-d', '--debug', action='store_true', help='Enables debug logging')
_args = _parser.parse_args()
# Configure the logging module
_logformat = '%(asctime)s : %(levelname)s : %(name)s : %(message)s'
_loglevel = logging.DEBUG if _args.debug else logging.INFO
logging.basicConfig(format=_logformat, level=_loglevel)
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
# We use a session variable so that HTTP keep-alive is utilized, and
# also so we'll always remember to set the content type appropriately.
_session = requests.session()
_session.headers['Content-Type'] = 'application/json'
# Stores previous last access times for each file
# so they can be compared each time files are polled.
_atimes = collections.defaultdict(time.time)
# Poll the list of files forever
while True:
# Delay the appropriate amount of time between polls
time.sleep(1.0 / _args.rate)
# Grab a list of all fully-qualified wave filenames in the trigger folder
files = (os.path.join(_args.triggers, f) for f in os.listdir(_args.triggers) if os.path.splitext(f)[1] == '.wav')
# Iterate over the list of files
for filename in files:
# If the last access time is newer than what was previously recorded, then take
# action on that file. A small threshold is used to prevent "double bouncing".
if os.stat(filename).st_atime - _atimes[filename] > 1.0:
# Open the file and pull out the data
with open(filename, 'rb') as f:
req = f.read()
# Immediately store off the last accessed time
_atimes[filename] = os.stat(filename).st_atime
# Separate the components of the request
method, url, data = req[52:].splitlines()
# Attempt to send the request and log the results
_logger.debug('Sending {0} request to {1}'.format(method, url))
try:
response = _session.request(method, url, data=data)
_logger.debug('Received response with status code {0}'.format(response.status_code))
except requests.RequestException:
_logger.warning('Unable to contact {0}'.format(url))
|
Add the standalone wave trigger script.
|
Add the standalone wave trigger script.
|
Python
|
apache-2.0
|
lordjabez/light-maestro,lordjabez/light-maestro,lordjabez/light-maestro,lordjabez/light-maestro
|
Add the standalone wave trigger script.
|
#!/usr/bin/env python
# Standard library imports
import argparse
import collections
import functools
import logging
import os
import json
import time
# Additional library imports
import requests
# Named logger for this module
_logger = logging.getLogger('wavetrigger')
# Parse the command line arguments
_parser = argparse.ArgumentParser('')
_parser.add_argument('-t', '--triggers', default='triggers', help='Folder containing trigger files')
_parser.add_argument('-r', '--rate', default=4.0, help='Poll rate in polls per second')
_parser.add_argument('-d', '--debug', action='store_true', help='Enables debug logging')
_args = _parser.parse_args()
# Configure the logging module
_logformat = '%(asctime)s : %(levelname)s : %(name)s : %(message)s'
_loglevel = logging.DEBUG if _args.debug else logging.INFO
logging.basicConfig(format=_logformat, level=_loglevel)
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
# We use a session variable so that HTTP keep-alive is utilized, and
# also so we'll always remember to set the content type appropriately.
_session = requests.session()
_session.headers['Content-Type'] = 'application/json'
# Stores previous last access times for each file
# so they can be compared each time files are polled.
_atimes = collections.defaultdict(time.time)
# Poll the list of files forever
while True:
# Delay the appropriate amount of time between polls
time.sleep(1.0 / _args.rate)
# Grab a list of all fully-qualified wave filenames in the trigger folder
files = (os.path.join(_args.triggers, f) for f in os.listdir(_args.triggers) if os.path.splitext(f)[1] == '.wav')
# Iterate over the list of files
for filename in files:
# If the last access time is newer than what was previously recorded, then take
# action on that file. A small threshold is used to prevent "double bouncing".
if os.stat(filename).st_atime - _atimes[filename] > 1.0:
# Open the file and pull out the data
with open(filename, 'rb') as f:
req = f.read()
# Immediately store off the last accessed time
_atimes[filename] = os.stat(filename).st_atime
# Separate the components of the request
method, url, data = req[52:].splitlines()
# Attempt to send the request and log the results
_logger.debug('Sending {0} request to {1}'.format(method, url))
try:
response = _session.request(method, url, data=data)
_logger.debug('Received response with status code {0}'.format(response.status_code))
except requests.RequestException:
_logger.warning('Unable to contact {0}'.format(url))
|
<commit_before><commit_msg>Add the standalone wave trigger script.<commit_after>
|
#!/usr/bin/env python
# Standard library imports
import argparse
import collections
import functools
import logging
import os
import json
import time
# Additional library imports
import requests
# Named logger for this module
_logger = logging.getLogger('wavetrigger')
# Parse the command line arguments
_parser = argparse.ArgumentParser('')
_parser.add_argument('-t', '--triggers', default='triggers', help='Folder containing trigger files')
_parser.add_argument('-r', '--rate', default=4.0, help='Poll rate in polls per second')
_parser.add_argument('-d', '--debug', action='store_true', help='Enables debug logging')
_args = _parser.parse_args()
# Configure the logging module
_logformat = '%(asctime)s : %(levelname)s : %(name)s : %(message)s'
_loglevel = logging.DEBUG if _args.debug else logging.INFO
logging.basicConfig(format=_logformat, level=_loglevel)
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
# We use a session variable so that HTTP keep-alive is utilized, and
# also so we'll always remember to set the content type appropriately.
_session = requests.session()
_session.headers['Content-Type'] = 'application/json'
# Stores previous last access times for each file
# so they can be compared each time files are polled.
_atimes = collections.defaultdict(time.time)
# Poll the list of files forever
while True:
# Delay the appropriate amount of time between polls
time.sleep(1.0 / _args.rate)
# Grab a list of all fully-qualified wave filenames in the trigger folder
files = (os.path.join(_args.triggers, f) for f in os.listdir(_args.triggers) if os.path.splitext(f)[1] == '.wav')
# Iterate over the list of files
for filename in files:
# If the last access time is newer than what was previously recorded, then take
# action on that file. A small threshold is used to prevent "double bouncing".
if os.stat(filename).st_atime - _atimes[filename] > 1.0:
# Open the file and pull out the data
with open(filename, 'rb') as f:
req = f.read()
# Immediately store off the last accessed time
_atimes[filename] = os.stat(filename).st_atime
# Separate the components of the request
method, url, data = req[52:].splitlines()
# Attempt to send the request and log the results
_logger.debug('Sending {0} request to {1}'.format(method, url))
try:
response = _session.request(method, url, data=data)
_logger.debug('Received response with status code {0}'.format(response.status_code))
except requests.RequestException:
_logger.warning('Unable to contact {0}'.format(url))
|
Add the standalone wave trigger script.#!/usr/bin/env python
# Standard library imports
import argparse
import collections
import functools
import logging
import os
import json
import time
# Additional library imports
import requests
# Named logger for this module
_logger = logging.getLogger('wavetrigger')
# Parse the command line arguments
_parser = argparse.ArgumentParser('')
_parser.add_argument('-t', '--triggers', default='triggers', help='Folder containing trigger files')
_parser.add_argument('-r', '--rate', default=4.0, help='Poll rate in polls per second')
_parser.add_argument('-d', '--debug', action='store_true', help='Enables debug logging')
_args = _parser.parse_args()
# Configure the logging module
_logformat = '%(asctime)s : %(levelname)s : %(name)s : %(message)s'
_loglevel = logging.DEBUG if _args.debug else logging.INFO
logging.basicConfig(format=_logformat, level=_loglevel)
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
# We use a session variable so that HTTP keep-alive is utilized, and
# also so we'll always remember to set the content type appropriately.
_session = requests.session()
_session.headers['Content-Type'] = 'application/json'
# Stores previous last access times for each file
# so they can be compared each time files are polled.
_atimes = collections.defaultdict(time.time)
# Poll the list of files forever
while True:
# Delay the appropriate amount of time between polls
time.sleep(1.0 / _args.rate)
# Grab a list of all fully-qualified wave filenames in the trigger folder
files = (os.path.join(_args.triggers, f) for f in os.listdir(_args.triggers) if os.path.splitext(f)[1] == '.wav')
# Iterate over the list of files
for filename in files:
# If the last access time is newer than what was previously recorded, then take
# action on that file. A small threshold is used to prevent "double bouncing".
if os.stat(filename).st_atime - _atimes[filename] > 1.0:
# Open the file and pull out the data
with open(filename, 'rb') as f:
req = f.read()
# Immediately store off the last accessed time
_atimes[filename] = os.stat(filename).st_atime
# Separate the components of the request
method, url, data = req[52:].splitlines()
# Attempt to send the request and log the results
_logger.debug('Sending {0} request to {1}'.format(method, url))
try:
response = _session.request(method, url, data=data)
_logger.debug('Received response with status code {0}'.format(response.status_code))
except requests.RequestException:
_logger.warning('Unable to contact {0}'.format(url))
|
<commit_before><commit_msg>Add the standalone wave trigger script.<commit_after>#!/usr/bin/env python
# Standard library imports
import argparse
import collections
import functools
import logging
import os
import json
import time
# Additional library imports
import requests
# Named logger for this module
_logger = logging.getLogger('wavetrigger')
# Parse the command line arguments
_parser = argparse.ArgumentParser('')
_parser.add_argument('-t', '--triggers', default='triggers', help='Folder containing trigger files')
_parser.add_argument('-r', '--rate', default=4.0, help='Poll rate in polls per second')
_parser.add_argument('-d', '--debug', action='store_true', help='Enables debug logging')
_args = _parser.parse_args()
# Configure the logging module
_logformat = '%(asctime)s : %(levelname)s : %(name)s : %(message)s'
_loglevel = logging.DEBUG if _args.debug else logging.INFO
logging.basicConfig(format=_logformat, level=_loglevel)
logging.getLogger('requests.packages.urllib3').setLevel(logging.WARNING)
# We use a session variable so that HTTP keep-alive is utilized, and
# also so we'll always remember to set the content type appropriately.
_session = requests.session()
_session.headers['Content-Type'] = 'application/json'
# Stores previous last access times for each file
# so they can be compared each time files are polled.
_atimes = collections.defaultdict(time.time)
# Poll the list of files forever
while True:
# Delay the appropriate amount of time between polls
time.sleep(1.0 / _args.rate)
# Grab a list of all fully-qualified wave filenames in the trigger folder
files = (os.path.join(_args.triggers, f) for f in os.listdir(_args.triggers) if os.path.splitext(f)[1] == '.wav')
# Iterate over the list of files
for filename in files:
# If the last access time is newer than what was previously recorded, then take
# action on that file. A small threshold is used to prevent "double bouncing".
if os.stat(filename).st_atime - _atimes[filename] > 1.0:
# Open the file and pull out the data
with open(filename, 'rb') as f:
req = f.read()
# Immediately store off the last accessed time
_atimes[filename] = os.stat(filename).st_atime
# Separate the components of the request
method, url, data = req[52:].splitlines()
# Attempt to send the request and log the results
_logger.debug('Sending {0} request to {1}'.format(method, url))
try:
response = _session.request(method, url, data=data)
_logger.debug('Received response with status code {0}'.format(response.status_code))
except requests.RequestException:
_logger.warning('Unable to contact {0}'.format(url))
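Based only on the parsing above (everything past byte 52 is split into method, URL and body lines), a hedged sketch of how a trigger file might be produced; the 52 filler bytes, the URL and the JSON body are placeholders, not part of the original script.
# Illustrative only: write a fake trigger .wav that the poller above would accept.
import os

os.makedirs('triggers', exist_ok=True)
payload = b'\n'.join([
    b'POST',                                # HTTP method
    b'http://localhost:8080/api/scenes/1',  # placeholder URL
    b'{"transition": 2.0}',                 # placeholder JSON body
])
with open(os.path.join('triggers', 'cue01.wav'), 'wb') as f:
    f.write(b'\x00' * 52)  # stands in for the header bytes skipped by req[52:]
    f.write(payload)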
|
|
6de3dddfadce6bb582adfe1e72fdd26afa974033
|
server/data_updates/00005_20181114-090110_vocabularies.py
|
server/data_updates/00005_20181114-090110_vocabularies.py
|
# -*- coding: utf-8; -*-
# This file is part of Superdesk.
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
#
# Creation: 2018-11-14 10:31
from superdesk.commands.data_updates import DataUpdate
from superdesk import get_resource_service
class DataUpdate(DataUpdate):
resource = 'vocabularies'
def forwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('single_value', False):
value = 'single selection'
else:
value = 'multi selection'
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'selection_type': value},
'$unset': {'single_value': 1}
})
def backwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('selection_type') == 'single selection':
value = True
else:
value = False
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'single_value': value},
'$unset': {'selection_type': 1}
})
|
Replace single_value with selection_type (SDESK-3551)
|
feat(vocabularies): Replace single_value with selection_type (SDESK-3551)
|
Python
|
agpl-3.0
|
pavlovicnemanja/superdesk,hlmnrmr/superdesk,superdesk/superdesk,ioanpocol/superdesk,superdesk/superdesk,superdesk/superdesk,petrjasek/superdesk,petrjasek/superdesk,pavlovicnemanja92/superdesk,pavlovicnemanja/superdesk,ioanpocol/superdesk,superdesk/superdesk,petrjasek/superdesk,pavlovicnemanja/superdesk,pavlovicnemanja92/superdesk,pavlovicnemanja92/superdesk,hlmnrmr/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk,hlmnrmr/superdesk,pavlovicnemanja92/superdesk,pavlovicnemanja92/superdesk,ioanpocol/superdesk
|
feat(vocabularies): Replace single_value with selection_type (SDESK-3551)
|
# -*- coding: utf-8; -*-
# This file is part of Superdesk.
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
#
# Creation: 2018-11-14 10:31
from superdesk.commands.data_updates import DataUpdate
from superdesk import get_resource_service
class DataUpdate(DataUpdate):
resource = 'vocabularies'
def forwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('single_value', False):
value = 'single selection'
else:
value = 'multi selection'
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'selection_type': value},
'$unset': {'single_value': 1}
})
def backwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('selection_type') == 'single selection':
value = True
else:
value = False
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'single_value': value},
'$unset': {'selection_type': 1}
})
|
<commit_before><commit_msg>feat(vocabularies): Replace single_value with selection_type (SDESK-3551)<commit_after>
|
# -*- coding: utf-8; -*-
# This file is part of Superdesk.
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
#
# Creation: 2018-11-14 10:31
from superdesk.commands.data_updates import DataUpdate
from superdesk import get_resource_service
class DataUpdate(DataUpdate):
resource = 'vocabularies'
def forwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('single_value', False):
value = 'single selection'
else:
value = 'multi selection'
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'selection_type': value},
'$unset': {'single_value': 1}
})
def backwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('selection_type') == 'single selection':
value = True
else:
value = False
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'single_value': value},
'$unset': {'selection_type': 1}
})
|
feat(vocabularies): Replace single_value with selection_type (SDESK-3551)# -*- coding: utf-8; -*-
# This file is part of Superdesk.
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
#
# Creation: 2018-11-14 10:31
from superdesk.commands.data_updates import DataUpdate
from superdesk import get_resource_service
class DataUpdate(DataUpdate):
resource = 'vocabularies'
def forwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('single_value', False):
value = 'single selection'
else:
value = 'multi selection'
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'selection_type': value},
'$unset': {'single_value': 1}
})
def backwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('selection_type') == 'single selection':
value = True
else:
value = False
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'single_value': value},
'$unset': {'selection_type': 1}
})
|
<commit_before><commit_msg>feat(vocabularies): Replace single_value with selection_type (SDESK-3551)<commit_after># -*- coding: utf-8; -*-
# This file is part of Superdesk.
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
#
# Creation: 2018-11-14 10:31
from superdesk.commands.data_updates import DataUpdate
from superdesk import get_resource_service
class DataUpdate(DataUpdate):
resource = 'vocabularies'
def forwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('single_value', False):
value = 'single selection'
else:
value = 'multi selection'
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'selection_type': value},
'$unset': {'single_value': 1}
})
def backwards(self, mongodb_collection, mongodb_database):
vocabularies_service = get_resource_service('vocabularies')
for vocabulary in vocabularies_service.get(req=None, lookup=None):
if vocabulary.get('selection_type') == 'single selection':
value = True
else:
value = False
mongodb_collection.update({'_id': vocabulary['_id']}, {
'$set': {'single_value': value},
'$unset': {'selection_type': 1}
})
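The forwards and backwards methods above amount to a two-way value translation; a small hedged sketch of that mapping, handy as a sanity check (the helper names are illustrative and not part of the migration).
# Mirrors the migration above: single_value=True <-> 'single selection'.
def selection_type_from_single_value(single_value):
    return 'single selection' if single_value else 'multi selection'

def single_value_from_selection_type(selection_type):
    return selection_type == 'single selection'

assert selection_type_from_single_value(True) == 'single selection'
assert selection_type_from_single_value(False) == 'multi selection'
assert single_value_from_selection_type('multi selection') is False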
|
|
e68057a0d81ff0ca2722f1a3641a795c2b168b9d
|
generate_cf_matrix_iris.py
|
generate_cf_matrix_iris.py
|
from sklearn.datasets import load_iris
import skfuzzy as fuzz
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import cosine_similarity
from sklearn import metrics
import warnings
warnings.filterwarnings('ignore')
data = load_iris()
X = data.data
# Add a random noise feature (one extra column) to X
X = np.hstack((X,np.random.rand(len(X),1)))
Y = data.target
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(X.T, 3, 60, error=0.0001, maxiter=100000, init=None)
metrics.adjusted_rand_score(Y,np.argmax(u,axis=0))
# Creating the CF Matrix
clu = 1
CF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
summ=0
for d in range(X.shape[0]):
if np.argmax(u[:,d]) == clu:
summ+=X[d][i]*X[d][j]
CF[i,j]=summ
# Normalization:
NCF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
NCF[i,j]=CF[i,j]/(CF[i,j]+CF[j,j]-CF[i,j])
# Semantic Centroid
SC = np.zeros(X.shape[1])
for i in range(X.shape[1]):
SC[i] = np.sum(NCF[i,:])
# Cosine Similarity to select top features
cosim = cosine_similarity(SC,CF)
print(cosim)
top = 4
print("Top features are: ",(-cosim).argsort()[0][:top])
|
Remove IRIS CF Matrix generation notebook
|
Remove IRIS CF Matrix generation notebook
|
Python
|
mit
|
achyudhk/Ensemble-Text-Classification
|
Remove IRIS CF Matrix generation notebook
|
from sklearn.datasets import load_iris
import skfuzzy as fuzz
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import cosine_similarity
from sklearn import metrics
import warnings
warnings.filterwarnings('ignore')
data = load_iris()
X = data.data
# Add a random noise feature (one extra column) to X
X = np.hstack((X,np.random.rand(len(X),1)))
Y = data.target
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(X.T, 3, 60, error=0.0001, maxiter=100000, init=None)
metrics.adjusted_rand_score(Y,np.argmax(u,axis=0))
# Creating the CF Matrix
clu = 1
CF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
summ=0
for d in range(X.shape[0]):
if np.argmax(u[:,d]) == clu:
summ+=X[d][i]*X[d][j]
CF[i,j]=summ
# Normalization:
NCF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
NCF[i,j]=CF[i,j]/(CF[i,j]+CF[j,j]-CF[i,j])
# Semantic Centroid
SC = np.zeros(X.shape[1])
for i in range(X.shape[1]):
SC[i] = np.sum(NCF[i,:])
# Cosine Similarity to select top features
cosim = cosine_similarity(SC,CF)
print(cosim)
top = 4
print("Top features are: ",(-cosim).argsort()[0][:top])
|
<commit_before><commit_msg>Remove IRIS CF Matrix generation notebook<commit_after>
|
from sklearn.datasets import load_iris
import skfuzzy as fuzz
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import cosine_similarity
from sklearn import metrics
import warnings
warnings.filterwarnings('ignore')
data = load_iris()
X = data.data
# Add a random noise feature (one extra column) to X
X = np.hstack((X,np.random.rand(len(X),1)))
Y = data.target
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(X.T, 3, 60, error=0.0001, maxiter=100000, init=None)
metrics.adjusted_rand_score(Y,np.argmax(u,axis=0))
# Creating the CF Matrix
clu = 1
CF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
summ=0
for d in range(X.shape[0]):
if np.argmax(u[:,d]) == clu:
summ+=X[d][i]*X[d][j]
CF[i,j]=summ
# Normalization:
NCF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
NCF[i,j]=CF[i,j]/(CF[i,j]+CF[j,j]-CF[i,j])
# Semantic Centroid
SC = np.zeros(X.shape[1])
for i in range(X.shape[1]):
SC[i] = np.sum(NCF[i,:])
# Cosine Similarity to select top features
cosim = cosine_similarity(SC,CF)
print(cosim)
top = 4
print("Top features are: ",(-cosim).argsort()[0][:top])
|
Remove IRIS CF Matrix generation notebookfrom sklearn.datasets import load_iris
import skfuzzy as fuzz
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import cosine_similarity
from sklearn import metrics
import warnings
warnings.filterwarnings('ignore')
data = load_iris()
X = data.data
# Add a random noise feature (one extra column) to X
X = np.hstack((X,np.random.rand(len(X),1)))
Y = data.target
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(X.T, 3, 60, error=0.0001, maxiter=100000, init=None)
metrics.adjusted_rand_score(Y,np.argmax(u,axis=0))
# Creating the CF Matrix
clu = 1
CF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
summ=0
for d in range(X.shape[0]):
if np.argmax(u[:,d]) == clu:
summ+=X[d][i]*X[d][j]
CF[i,j]=summ
# Normalization:
NCF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
NCF[i,j]=CF[i,j]/(CF[i,j]+CF[j,j]-CF[i,j])
# Semantic Centroid
SC = np.zeros(X.shape[1])
for i in range(X.shape[1]):
SC[i] = np.sum(NCF[i,:])
# Cosine Similarity to select top features
cosim = cosine_similarity(SC,CF)
print(cosim)
top = 4
print("Top features are: ",(-cosim).argsort()[0][:top])
|
<commit_before><commit_msg>Remove IRIS CF Matrix generation notebook<commit_after>from sklearn.datasets import load_iris
import skfuzzy as fuzz
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import cosine_similarity
from sklearn import metrics
import warnings
warnings.filterwarnings('ignore')
data = load_iris()
X = data.data
# Add a random noise feature (one extra column) to X
X = np.hstack((X,np.random.rand(len(X),1)))
Y = data.target
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(X.T, 3, 60, error=0.0001, maxiter=100000, init=None)
metrics.adjusted_rand_score(Y,np.argmax(u,axis=0))
# Creating the CF Matrix
clu = 1
CF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
summ=0
for d in range(X.shape[0]):
if np.argmax(u[:,d]) == clu:
summ+=X[d][i]*X[d][j]
CF[i,j]=summ
# Normalization:
NCF = np.zeros((X.shape[1],X.shape[1]))
for i in range(X.shape[1]):
for j in range(X.shape[1]):
NCF[i,j]=CF[i,j]/(CF[i,j]+CF[j,j]-CF[i,j])
# Semantic Centroid
SC = np.zeros(X.shape[1])
for i in range(X.shape[1]):
SC[i] = np.sum(NCF[i,:])
# Cosine Similarity to select top features
cosim = cosine_similarity(SC,CF)
print(cosim)
top = 4
print("Top features are: ",(-cosim).argsort()[0][:top])
|
|
118f42c9155b746f0fd37a2a4a4258e39d50d6ca
|
kitsune/kbadge/migrations/0002_auto_20181023_1319.py
|
kitsune/kbadge/migrations/0002_auto_20181023_1319.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image)"
)
]
|
Add data migration to fix image paths on pre-existing badges.
|
Add data migration to fix image paths on pre-existing badges.
|
Python
|
bsd-3-clause
|
anushbmx/kitsune,anushbmx/kitsune,mozilla/kitsune,anushbmx/kitsune,mozilla/kitsune,mozilla/kitsune,anushbmx/kitsune,mozilla/kitsune
|
Add data migration to fix image paths on pre-existing badges.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image)"
)
]
|
<commit_before><commit_msg>Add data migration to fix image paths on pre-existing badges.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image)"
)
]
|
Add data migration to fix image paths on pre-existing badges.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image)"
)
]
|
<commit_before><commit_msg>Add data migration to fix image paths on pre-existing badges.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image)"
)
]
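A hedged variant of the operation above showing how the same RunSQL could carry an explicit reverse, assuming a MySQL backend and that every image value really starts with 'uploads/'; the committed migration defines no reverse.
# Sketch only: 'uploads/' is 8 characters, so SUBSTRING(image, 9) strips it again.
from django.db import migrations

operations = [
    migrations.RunSQL(
        "UPDATE badger_badge SET image = CONCAT('uploads/', image)",
        reverse_sql="UPDATE badger_badge SET image = SUBSTRING(image, 9)",
    )
]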
|
|
99da8c760624f88af5bf352ec29c71588501174c
|
testmodel/webapp/selenium/test/log-in-test.py
|
testmodel/webapp/selenium/test/log-in-test.py
|
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/begin.do')
def testLogin(self):
login_link = self.browser.find_element_by_link_text('Log in')
self.assertIsNotNone(login_link)
login_link.click()
username = self.browser.find_element_by_name('username')
self.assertIsNotNone(username)
username.send_keys('intermine-test-user')
password = self.browser.find_element_by_name('password')
self.assertIsNotNone(password)
password.send_keys('intermine-test-user-password')
submit = self.browser.find_element_by_name('action')
submit.click()
logged_in_as = self.browser.find_element_by_css_selector('#loginbar li:nth-child(2)')
self.assertEqual('intermine-test-user', logged_in_as.text)
|
Test ability to log in
|
Test ability to log in
Former-commit-id: b29fa5c96fa5e0fdd2117164baace9ac8492867d
|
Python
|
lgpl-2.1
|
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
|
Test ability to log in
Former-commit-id: b29fa5c96fa5e0fdd2117164baace9ac8492867d
|
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/begin.do')
def testLogin(self):
login_link = self.browser.find_element_by_link_text('Log in')
self.assertIsNotNone(login_link)
login_link.click()
username = self.browser.find_element_by_name('username')
self.assertIsNotNone(username)
username.send_keys('intermine-test-user')
password = self.browser.find_element_by_name('password')
self.assertIsNotNone(password)
password.send_keys('intermine-test-user-password')
submit = self.browser.find_element_by_name('action')
submit.click()
logged_in_as = self.browser.find_element_by_css_selector('#loginbar li:nth-child(2)')
self.assertEqual('intermine-test-user', logged_in_as.text)
|
<commit_before><commit_msg>Test ability to log in
Former-commit-id: b29fa5c96fa5e0fdd2117164baace9ac8492867d<commit_after>
|
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/begin.do')
def testLogin(self):
login_link = self.browser.find_element_by_link_text('Log in')
self.assertIsNotNone(login_link)
login_link.click()
username = self.browser.find_element_by_name('username')
self.assertIsNotNone(username)
username.send_keys('intermine-test-user')
password = self.browser.find_element_by_name('password')
self.assertIsNotNone(password)
password.send_keys('intermine-test-user-password')
submit = self.browser.find_element_by_name('action')
submit.click()
logged_in_as = self.browser.find_element_by_css_selector('#loginbar li:nth-child(2)')
self.assertEqual('intermine-test-user', logged_in_as.text)
|
Test ability to log in
Former-commit-id: b29fa5c96fa5e0fdd2117164baace9ac8492867dimport unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/begin.do')
def testLogin(self):
login_link = self.browser.find_element_by_link_text('Log in')
self.assertIsNotNone(login_link)
login_link.click()
username = self.browser.find_element_by_name('username')
self.assertIsNotNone(username)
username.send_keys('intermine-test-user')
password = self.browser.find_element_by_name('password')
self.assertIsNotNone(password)
password.send_keys('intermine-test-user-password')
submit = self.browser.find_element_by_name('action')
submit.click()
logged_in_as = self.browser.find_element_by_css_selector('#loginbar li:nth-child(2)')
self.assertEqual('intermine-test-user', logged_in_as.text)
|
<commit_before><commit_msg>Test ability to log in
Former-commit-id: b29fa5c96fa5e0fdd2117164baace9ac8492867d<commit_after>import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/begin.do')
def testLogin(self):
login_link = self.browser.find_element_by_link_text('Log in')
self.assertIsNotNone(login_link)
login_link.click()
username = self.browser.find_element_by_name('username')
self.assertIsNotNone(username)
username.send_keys('intermine-test-user')
password = self.browser.find_element_by_name('password')
self.assertIsNotNone(password)
password.send_keys('intermine-test-user-password')
submit = self.browser.find_element_by_name('action')
submit.click()
logged_in_as = self.browser.find_element_by_css_selector('#loginbar li:nth-child(2)')
self.assertEqual('intermine-test-user', logged_in_as.text)
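Reading the login bar immediately after the click can be flaky if the page is still loading; a hedged sketch of an explicit wait that could replace the last two lines of testLogin (the 10-second timeout is arbitrary).
# Sketch: wait for the login bar entry to appear before asserting on it.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

logged_in_as = WebDriverWait(self.browser, 10).until(
    EC.presence_of_element_located((By.CSS_SELECTOR, '#loginbar li:nth-child(2)')))
self.assertEqual('intermine-test-user', logged_in_as.text)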
|
|
578db98c1d61dbb58f2f617951b06beff2992d2e
|
scripts/pydev-launch.py
|
scripts/pydev-launch.py
|
#!/usr/bin/env python2.7
'''
This module supports launching agents from Eclipse PyDev with minimal
configuration. To get it working, follow these simple steps:
1. Create a new "Run Configuration" named "Launch VOLTTRON
Agent" and point the "Main Module" to this script (e.g.,
${project_loc}/scripts/launch.py).
2. On the Arguments tab, use ${resource_loc} as the first
argument in "Program Arguments" and set the "Working
directory" to Default.
That is all that is required, assuming the default interpreter is
pointing to the virtualenv Python.
This script will automatically set the AGENT_CONFIG, AGENT_SUB_ADDR, and
AGENT_PUB_ADDR environment variables, if they are not already set, as
well as add the module to sys.path, then transfer execution to the
script. One caveat is that the config file needs to be named config and
must exist in the directory directly below the root package of the
script.
'''
import os.path
import runpy
import sys
# Find the root path, to add to sys.path, and the module name
path, filename = os.path.split(os.path.abspath(sys.argv[1]))
assert filename.endswith('.py')
module = [filename[:-3]]
while path:
if not os.path.exists(os.path.join(path, '__init__.py')):
break
path, package = os.path.split(path)
module.insert(0, package)
module = '.'.join(module)
# Add environment variables required to execute agents
try:
home = os.environ['VOLTTRON_HOME']
except KeyError:
home = os.path.expanduser('~/.volttron')
if 'AGENT_CONFIG' not in os.environ:
config = os.path.join(path, 'config')
if os.path.exists(config):
os.environ['AGENT_CONFIG'] = config
if 'AGENT_SUB_ADDR' not in os.environ:
os.environ['AGENT_SUB_ADDR'] = 'ipc://@/%s/run/subscribe' % home
if 'AGENT_PUB_ADDR' not in os.environ:
os.environ['AGENT_PUB_ADDR'] = 'ipc://@/%s/run/publish' % home
# Remove this script from sys.argv
del sys.argv[0]
# Append agent root directory to sys.path
sys.path.append(path)
# Transfer execution to the agent module
runpy.run_module(module, run_name='__main__')
|
Add helper script for executing agents in PyDev.
|
Add helper script for executing agents in PyDev.
|
Python
|
bsd-2-clause
|
schandrika/volttron,schandrika/volttron,schandrika/volttron,schandrika/volttron
|
Add helper script for executing agents in PyDev.
|
#!/usr/bin/env python2.7
'''
This module supports launching agents from Eclipse PyDev with minimal
configuration. To get it working, follow these simple steps:
1. Create a new "Run Configuration" named "Launch VOLTTRON
Agent" and point the "Main Module" to this script (e.g.,
${project_loc}/scripts/launch.py).
2. On the Arguments tab, use ${resource_loc} as the first
argument in "Program Arguments" and set the "Working
directory" to Default.
That is all that is required, assuming the default interpreter is
pointing to the virtualenv Python.
This script will automatically set the AGENT_CONFIG, AGENT_SUB_ADDR, and
AGENT_PUB_ADDR environment variables, if they are not already set, as
well as add the module to sys.path, then transfer execution to the
script. One caveat is that the config file needs to be named config and
must exist in the directory directly below the root package of the
script.
'''
import os.path
import runpy
import sys
# Find the root path, to add to sys.path, and the module name
path, filename = os.path.split(os.path.abspath(sys.argv[1]))
assert filename.endswith('.py')
module = [filename[:-3]]
while path:
if not os.path.exists(os.path.join(path, '__init__.py')):
break
path, package = os.path.split(path)
module.insert(0, package)
module = '.'.join(module)
# Add environment variables required to execute agents
try:
home = os.environ['VOLTTRON_HOME']
except KeyError:
home = os.path.expanduser('~/.volttron')
if 'AGENT_CONFIG' not in os.environ:
config = os.path.join(path, 'config')
if os.path.exists(config):
os.environ['AGENT_CONFIG'] = config
if 'AGENT_SUB_ADDR' not in os.environ:
os.environ['AGENT_SUB_ADDR'] = 'ipc://@/%s/run/subscribe' % home
if 'AGENT_PUB_ADDR' not in os.environ:
os.environ['AGENT_PUB_ADDR'] = 'ipc://@/%s/run/publish' % home
# Remove this script from sys.argv
del sys.argv[0]
# Append agent root directory to sys.path
sys.path.append(path)
# Transfer execution to the agent module
runpy.run_module(module, run_name='__main__')
|
<commit_before><commit_msg>Add helper script for executing agents in PyDev.<commit_after>
|
#!/usr/bin/env python2.7
'''
This module supports launching agents from Eclipse PyDev with minimal
configuration. To get it working, follow these simple steps:
1. Create a new "Run Configuration" named "Launch VOLTTRON
Agent" and point the "Main Module" to this script (e.g.,
${project_loc}/scripts/launch.py).
2. On the Arguments tab, use ${resource_loc} as the first
argument in "Program Arguments" and set the "Working
directory" to Default.
That is all that is required, assuming the default interpreter is
pointing to the virtualenv Python.
This script will automatically set the AGENT_CONFIG, AGENT_SUB_ADDR, and
AGENT_PUB_ADDR environment variables, if they are not already set, as
well as add the module to sys.path, then transfer execution to the
script. One caveat is that the config file needs to be named config and
must exist in the directory directly below the root package of the
script.
'''
import os.path
import runpy
import sys
# Find the root path, to add to sys.path, and the module name
path, filename = os.path.split(os.path.abspath(sys.argv[1]))
assert filename.endswith('.py')
module = [filename[:-3]]
while path:
if not os.path.exists(os.path.join(path, '__init__.py')):
break
path, package = os.path.split(path)
module.insert(0, package)
module = '.'.join(module)
# Add environment variables required to execute agents
try:
home = os.environ['VOLTTRON_HOME']
except KeyError:
home = os.path.expanduser('~/.volttron')
if 'AGENT_CONFIG' not in os.environ:
config = os.path.join(path, 'config')
if os.path.exists(config):
os.environ['AGENT_CONFIG'] = config
if 'AGENT_SUB_ADDR' not in os.environ:
os.environ['AGENT_SUB_ADDR'] = 'ipc://@/%s/run/subscribe' % home
if 'AGENT_PUB_ADDR' not in os.environ:
os.environ['AGENT_PUB_ADDR'] = 'ipc://@/%s/run/publish' % home
# Remove this script from sys.argv
del sys.argv[0]
# Append agent root directory to sys.path
sys.path.append(path)
# Transfer execution to the agent module
runpy.run_module(module, run_name='__main__')
|
Add helper script for executing agents in PyDev.#!/usr/bin/env python2.7
'''
This module supports launching agents from Eclipse PyDev with minimal
configuration. To get it working, follow these simple steps:
1. Create a new "Run Configuration" named "Launch VOLTTRON
Agent" and point the "Main Module" to this script (e.g.,
${project_loc}/scripts/launch.py).
2. On the Arguments tab, use ${resource_loc} as the first
argument in "Program Arguments" and set the "Working
directory" to Default.
That is all that is required, assuming the default interpreter is
pointing to the virtualenv Python.
This script will automatically set the AGENT_CONFIG, AGENT_SUB_ADDR, and
AGENT_PUB_ADDR environment variables, if they are not already set, as
well as add the module to sys.path, then transfer execution to the
script. One caveat is that the config file needs to be named config and
must exist in the directory directly below the root package of the
script.
'''
import os.path
import runpy
import sys
# Find the root path, to add to sys.path, and the module name
path, filename = os.path.split(os.path.abspath(sys.argv[1]))
assert filename.endswith('.py')
module = [filename[:-3]]
while path:
if not os.path.exists(os.path.join(path, '__init__.py')):
break
path, package = os.path.split(path)
module.insert(0, package)
module = '.'.join(module)
# Add environment variables required to execute agents
try:
home = os.environ['VOLTTRON_HOME']
except KeyError:
home = os.path.expanduser('~/.volttron')
if 'AGENT_CONFIG' not in os.environ:
config = os.path.join(path, 'config')
if os.path.exists(config):
os.environ['AGENT_CONFIG'] = config
if 'AGENT_SUB_ADDR' not in os.environ:
os.environ['AGENT_SUB_ADDR'] = 'ipc://@/%s/run/subscribe' % home
if 'AGENT_PUB_ADDR' not in os.environ:
os.environ['AGENT_PUB_ADDR'] = 'ipc://@/%s/run/publish' % home
# Remove this script from sys.argv
del sys.argv[0]
# Append agent root directory to sys.path
sys.path.append(path)
# Transfer execution to the agent module
runpy.run_module(module, run_name='__main__')
|
<commit_before><commit_msg>Add helper script for executing agents in PyDev.<commit_after>#!/usr/bin/env python2.7
'''
This module supports launching agents from Eclipse PyDev with minimal
configuration. To get it working, follow these simple steps:
1. Create a new "Run Configuration" named "Launch VOLTTRON
Agent" and point the "Main Module" to this script (e.g.,
${project_loc}/scripts/launch.py).
2. On the Arguments tab, use ${resource_loc} as the first
argument in "Program Arguments" and set the "Working
directory" to Default.
That is all that is required, assuming the default interpreter is
pointing to the virtualenv Python.
This script will automatically set the AGENT_CONFIG, AGENT_SUB_ADDR, and
AGENT_PUB_ADDR environment variables, if they are not already set, as
well as add the module to sys.path, then transfer execution to the
script. One caveat is that the config file needs to be named config and
must exist in the directory directly below the root package of the
script.
'''
import os.path
import runpy
import sys
# Find the root path, to add to sys.path, and the module name
path, filename = os.path.split(os.path.abspath(sys.argv[1]))
assert filename.endswith('.py')
module = [filename[:-3]]
while path:
if not os.path.exists(os.path.join(path, '__init__.py')):
break
path, package = os.path.split(path)
module.insert(0, package)
module = '.'.join(module)
# Add environment variables required to execute agents
try:
home = os.environ['VOLTTRON_HOME']
except KeyError:
home = os.path.expanduser('~/.volttron')
if 'AGENT_CONFIG' not in os.environ:
config = os.path.join(path, 'config')
if os.path.exists(config):
os.environ['AGENT_CONFIG'] = config
if 'AGENT_SUB_ADDR' not in os.environ:
os.environ['AGENT_SUB_ADDR'] = 'ipc://@/%s/run/subscribe' % home
if 'AGENT_PUB_ADDR' not in os.environ:
os.environ['AGENT_PUB_ADDR'] = 'ipc://@/%s/run/publish' % home
# Remove this script from sys.argv
del sys.argv[0]
# Append agent root directory to sys.path
sys.path.append(path)
# Transfer execution to the agent module
runpy.run_module(module, run_name='__main__')
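A hedged, stand-alone illustration of the root-package walk described in the docstring above: it builds a throwaway package tree and derives the dotted module name the same way (the package names are made up).
# Demonstration only: mirrors the module-name derivation used by the launcher.
import os
import tempfile

root = tempfile.mkdtemp()
pkg = os.path.join(root, 'listeneragent', 'agent')
os.makedirs(pkg)
open(os.path.join(root, 'listeneragent', '__init__.py'), 'a').close()
open(os.path.join(pkg, '__init__.py'), 'a').close()
script = os.path.join(pkg, 'agent.py')
open(script, 'a').close()

path, filename = os.path.split(os.path.abspath(script))
module = [filename[:-3]]
while path:
    if not os.path.exists(os.path.join(path, '__init__.py')):
        break
    path, package = os.path.split(path)
    module.insert(0, package)

assert '.'.join(module) == 'listeneragent.agent.agent'
assert path == root  # the directory that gets appended to sys.path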
|
|
664ad090e7b4c2922b5c89932e61d7ddef326da9
|
script/get_matrices.py
|
script/get_matrices.py
|
import sys
from HTMLParser import HTMLParser
class MyHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.state = 'NONE'
def handle_starttag(self, tag, attrs):
if self.state == 'FINISHED':
return
if tag == '<table>':
self.state = 'PARSING_TABLE'
print tag
elif tag == '<td>':
self.state ='PARSING_VALUE'
elif tag == '<tr>':
if skipped_header:
self.state = 'PARSING_ENTRY'
def handle_endtag(self, tag):
if tag == '<table>':
self.state ='FINISHED'
elif tag == '<td>':
self.state = 'PARSING_ENTRY'
elif tag == '<tr>':
self.state = 'PARSING_TABLE'
def handle_data(self, data):
if self.state == 'PARSING_VALUE':
print data
def main():
if len(sys.argv) != 2:
print "Usage: python get_matrices.py <html_file>"
return
f = open(sys.argv[1])
state = 'NONE'
entry_count = 0
text = ""
max_lines = 100
c = 0
for line in f:
text = text + '\n' + line
if c > max_lines:
break
c += 1
parser = MyHtmlParser()
parser.feed('<table>bau</table>')
if __name__ == '__main__':
main()
|
Add a simple python to fetch matrices from UoF collection.
|
Add a simple python to fetch matrices from UoF collection.
|
Python
|
mit
|
caskorg/cask,caskorg/cask,caskorg/cask,caskorg/cask,caskorg/cask
|
Add a simple python to fetch matrices from UoF collection.
|
import sys
from HTMLParser import HTMLParser
class MyHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.state = 'NONE'
def handle_starttag(self, tag, attrs):
if self.state == 'FINISHED':
return
if tag == '<table>':
self.state = 'PARSING_TABLE'
print tag
elif tag == '<td>':
self.state ='PARSING_VALUE'
elif tag == '<tr>':
if skipped_header:
self.state = 'PARSING_ENTRY'
def handle_endtag(self, tag):
if tag == '<table>':
self.state ='FINISHED'
elif tag == '<td>':
self.state = 'PARSING_ENTRY'
elif tag == '<tr>':
self.state = 'PARSING_TABLE'
def handle_data(self, data):
if self.state == 'PARSING_VALUE':
print data
def main():
if len(sys.argv) != 2:
print "Usage: python get_matrices.py <html_file>"
return
f = open(sys.argv[1])
state = 'NONE'
entry_count = 0
text = ""
max_lines = 100
c = 0
for line in f:
text = text + '\n' + line
if c > max_lines:
break
c += 1
parser = MyHtmlParser()
parser.feed('<table>bau</table>')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a simple python to fetch matrices from UoF collection.<commit_after>
|
import sys
from HTMLParser import HTMLParser
class MyHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.state = 'NONE'
def handle_starttag(self, tag, attrs):
if self.state == 'FINISHED':
return
if tag == '<table>':
self.state = 'PARSING_TABLE'
print tag
elif tag == '<td>':
self.state ='PARSING_VALUE'
elif tag == '<tr>':
if skipped_header:
self.state = 'PARSING_ENTRY'
def handle_endtag(self, tag):
if tag == '<table>':
self.state ='FINISHED'
elif tag == '<td>':
self.state = 'PARSING_ENTRY'
elif tag == '<tr>':
self.state = 'PARSING_TABLE'
def handle_data(self, data):
if self.state == 'PARSING_VALUE':
print data
def main():
if len(sys.argv) != 2:
print "Usage: python get_matrices.py <html_file>"
return
f = open(sys.argv[1])
state = 'NONE'
entry_count = 0
text = ""
max_lines = 100
c = 0
for line in f:
text = text + '\n' + line
if c > max_lines:
break
c += 1
parser = MyHtmlParser()
parser.feed('<table>bau</table>')
if __name__ == '__main__':
main()
|
Add a simple python to fetch matrices from UoF collection.import sys
from HTMLParser import HTMLParser
class MyHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.state = 'NONE'
def handle_starttag(self, tag, attrs):
if self.state == 'FINISHED':
return
if tag == '<table>':
self.state = 'PARSING_TABLE'
print tag
elif tag == '<td>':
self.state ='PARSING_VALUE'
elif tag == '<tr>':
if skipped_header:
self.state = 'PARSING_ENTRY'
def handle_endtag(self, tag):
if tag == '<table>':
self.state ='FINISHED'
elif tag == '<td>':
self.state = 'PARSING_ENTRY'
elif tag == '<tr>':
self.state = 'PARSING_TABLE'
def handle_data(self, data):
if self.state == 'PARSING_VALUE':
print data
def main():
if len(sys.argv) != 2:
print "Usage: python get_matrices.py <html_file>"
return
f = open(sys.argv[1])
state = 'NONE'
entry_count = 0
text = ""
max_lines = 100
c = 0
for line in f:
text = text + '\n' + line
if c > max_lines:
break
c += 1
parser = MyHtmlParser()
parser.feed('<table>bau</table>')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a simple python to fetch matrices from UoF collection.<commit_after>import sys
from HTMLParser import HTMLParser
class MyHtmlParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.state = 'NONE'
def handle_starttag(self, tag, attrs):
if self.state == 'FINISHED':
return
if tag == '<table>':
self.state = 'PARSING_TABLE'
print tag
elif tag == '<td>':
self.state ='PARSING_VALUE'
elif tag == '<tr>':
if skipped_header:
self.state = 'PARSING_ENTRY'
def handle_endtag(self, tag):
if tag == '<table>':
self.state ='FINISHED'
elif tag == '<td>':
self.state = 'PARSING_ENTRY'
elif tag == '<tr>':
self.state = 'PARSING_TABLE'
def handle_data(self, data):
if self.state == 'PARSING_VALUE':
print data
def main():
if len(sys.argv) != 2:
print "Usage: python get_matrices.py <html_file>"
return
f = open(sys.argv[1])
state = 'NONE'
entry_count = 0
text = ""
max_lines = 100
c = 0
for line in f:
text = text + '\n' + line
if c > max_lines:
break
c += 1
parser = MyHtmlParser()
parser.feed('<table>bau</table>')
if __name__ == '__main__':
main()
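Note that, as written, the parser above never matches: Python's HTMLParser hands handle_starttag/handle_endtag bare tag names ('table', 'td'), not '<table>'/'<td>', and skipped_header is never defined. A minimal corrected sketch of the same table-scraping idea (hypothetical, not part of the original commit; the HTMLParser import path is the Python 2 one used above):

from HTMLParser import HTMLParser  # html.parser on Python 3

class TableValueParser(HTMLParser):
    """Collects the text of every <td> cell inside a <table>."""
    def __init__(self):
        HTMLParser.__init__(self)
        self.in_table = False
        self.in_cell = False
        self.values = []

    def handle_starttag(self, tag, attrs):
        if tag == 'table':
            self.in_table = True
        elif tag == 'td' and self.in_table:
            self.in_cell = True

    def handle_endtag(self, tag):
        if tag == 'table':
            self.in_table = False
        elif tag == 'td':
            self.in_cell = False

    def handle_data(self, data):
        if self.in_cell and data.strip():
            self.values.append(data.strip())

parser = TableValueParser()
parser.feed('<table><tr><td>bau</td></tr></table>')
print(parser.values)  # ['bau']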
|
|
4e6458bddec9758da609c681a0ea05b43c399f50
|
bot/multithreading/worker/pool/workers/main.py
|
bot/multithreading/worker/pool/workers/main.py
|
import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
        :param min_workers: Minimum number of workers that must be running at all times, ready to accept works.
        :param max_workers: Maximum number of workers that can be spawned in heavy workload situations.
        :param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
            that were spawned will remain alive without processing works (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
|
Create a worker pool to handle pool of workers that can grow or shrink as necessary
|
Create a worker pool to handle pool of workers that can grow or shrink as necessary
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
Create a worker pool to handle pool of workers that can grow or shrink as necessary
|
import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
        :param min_workers: Minimum number of workers that must be running at all times, ready to accept works.
        :param max_workers: Maximum number of workers that can be spawned in heavy workload situations.
        :param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
            that were spawned will remain alive without processing works (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
|
<commit_before><commit_msg>Create a worker pool to handle pool of workers that can grow or shrink as necessary<commit_after>
|
import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
        :param min_workers: Minimum number of workers that must be running at all times, ready to accept works.
        :param max_workers: Maximum number of workers that can be spawned in heavy workload situations.
        :param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
            that were spawned will remain alive without processing works (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
|
Create a worker pool to handle pool of workers that can grow or shrink as necessaryimport queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
        :param min_workers: Minimum number of workers that must be running at all times, ready to accept works.
        :param max_workers: Maximum number of workers that can be spawned in heavy workload situations.
        :param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
            that were spawned will remain alive without processing works (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
|
<commit_before><commit_msg>Create a worker pool to handle pool of workers that can grow or shrink as necessary<commit_after>import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
        :param min_workers: Minimum number of workers that must be running at all times, ready to accept works.
        :param max_workers: Maximum number of workers that can be spawned in heavy workload situations.
        :param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
            that were spawned will remain alive without processing works (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
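A rough usage sketch for the pool above, based only on the constructor signature shown here; the error-handler and worker-starter callables are hypothetical placeholders rather than the repository's actual API, and the snippet assumes it runs inside the same project:

import queue
import threading

from bot.multithreading.worker.pool.workers.main import QueueWorkerPool

def on_error(*args):
    # hypothetical error handler; the real signature is not shown in this commit
    print("work failed", args)

def start_worker(worker):
    # hypothetical starter: run each spawned worker's loop in a daemon thread
    threading.Thread(target=worker.run, daemon=True).start()

pool = QueueWorkerPool("io", queue.Queue(), on_error, start_worker,
                       min_workers=2, max_workers=8, max_seconds_idle=30)
pool.start()          # spawns the initial min_workers workers
pool.post(some_work)  # some_work: a bot.multithreading.work.Work built elsewhere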
|
|
82bc2b2cf63e61bc7282bb61e108a6815e8acd78
|
cyhdfs3/tests/test_cli.py
|
cyhdfs3/tests/test_cli.py
|
import click
from click.testing import CliRunner
from cyhdfs3 import cli
from utils import *
def test_create_file_list(hdfs):
runner = CliRunner()
result = runner.invoke(cli.ls)
# assert result.exit_code == 0
# assert result.output == 'Hello Peter!\n'
|
Add template for cli test
|
Add template for cli test
|
Python
|
apache-2.0
|
danielfrg/libhdfs3.py,danielfrg/cyhdfs3,danielfrg/libhdfs3.py,danielfrg/cyhdfs3
|
Add template for cli test
|
import click
from click.testing import CliRunner
from cyhdfs3 import cli
from utils import *
def test_create_file_list(hdfs):
runner = CliRunner()
result = runner.invoke(cli.ls)
# assert result.exit_code == 0
# assert result.output == 'Hello Peter!\n'
|
<commit_before><commit_msg>Add template for cli test<commit_after>
|
import click
from click.testing import CliRunner
from cyhdfs3 import cli
from utils import *
def test_create_file_list(hdfs):
runner = CliRunner()
result = runner.invoke(cli.ls)
# assert result.exit_code == 0
# assert result.output == 'Hello Peter!\n'
|
Add template for cli testimport click
from click.testing import CliRunner
from cyhdfs3 import cli
from utils import *
def test_create_file_list(hdfs):
runner = CliRunner()
result = runner.invoke(cli.ls)
# assert result.exit_code == 0
# assert result.output == 'Hello Peter!\n'
|
<commit_before><commit_msg>Add template for cli test<commit_after>import click
from click.testing import CliRunner
from cyhdfs3 import cli
from utils import *
def test_create_file_list(hdfs):
runner = CliRunner()
result = runner.invoke(cli.ls)
# assert result.exit_code == 0
# assert result.output == 'Hello Peter!\n'
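For comparison, the commented-out assertions follow click's standard CliRunner pattern; a self-contained version against a throwaway command looks like this (plain click API, unrelated to cyhdfs3's actual ls options, which are not shown here):

import click
from click.testing import CliRunner

@click.command()
@click.argument('name')
def hello(name):
    click.echo('Hello %s!' % name)

def test_hello():
    runner = CliRunner()
    result = runner.invoke(hello, ['Peter'])
    assert result.exit_code == 0
    assert result.output == 'Hello Peter!\n'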
|
|
3f59199b75c2f0679ca100519ae6e95b1c5613e0
|
checkheaders.py
|
checkheaders.py
|
import requests
#checking if the app is vulnerable to clickjacking
def check_clickjacking(url):
response = requests.get(url, verify=False)
try:
response_check_clickjacking = response.headers['x-frame-options']
if response_check_clickjacking == "DENY" or \
response_check_clickjacking == "SAMEORIGIN":
print "The app is not vulnerable to clickjacking"
else:
print response_check_clickjacking
except KeyError:
print "The app is vulnerable to clickjacking"
#checking if HSTS is implemented
def check_hsts(url):
response = requests.get(url, verify=False)
try:
response_check_hsts = response.headers['strict-transport-security']
if response_check_hsts:
print "The app has hsts implemented"
except KeyError:
print "The app doesn't have HSTS headers"
#perform sslscan for the url using sslscan and output it to a file.
def sslscan(url, output_file):
pass
#check if server version is disclosed in headers
def info_disclosure(url):
pass
|
Check response headers for clickjacking and HSTS headers
|
Check response headers for clickjacking and HSTS headers
|
Python
|
mit
|
satish28/security_automation
|
Check response headers for clickjacking and HSTS headers
|
import requests
#checking if the app is vulnerable to clickjacking
def check_clickjacking(url):
response = requests.get(url, verify=False)
try:
response_check_clickjacking = response.headers['x-frame-options']
if response_check_clickjacking == "DENY" or \
response_check_clickjacking == "SAMEORIGIN":
print "The app is not vulnerable to clickjacking"
else:
print response_check_clickjacking
except KeyError:
print "The app is vulnerable to clickjacking"
#checking if HSTS is implemented
def check_hsts(url):
response = requests.get(url, verify=False)
try:
response_check_hsts = response.headers['strict-transport-security']
if response_check_hsts:
print "The app has hsts implemented"
except KeyError:
print "The app doesn't have HSTS headers"
#perform sslscan for the url using sslscan and output it to a file.
def sslscan(url, output_file):
pass
#check if server version is disclosed in headers
def info_disclosure(url):
pass
|
<commit_before><commit_msg>Check response headers for clickjacking and HSTS headers<commit_after>
|
import requests
#checking if the app is vulnerable to clickjacking
def check_clickjacking(url):
response = requests.get(url, verify=False)
try:
response_check_clickjacking = response.headers['x-frame-options']
if response_check_clickjacking == "DENY" or \
response_check_clickjacking == "SAMEORIGIN":
print "The app is not vulnerable to clickjacking"
else:
print response_check_clickjacking
except KeyError:
print "The app is vulnerable to clickjacking"
#checking if HSTS is implemented
def check_hsts(url):
response = requests.get(url, verify=False)
try:
response_check_hsts = response.headers['strict-transport-security']
if response_check_hsts:
print "The app has hsts implemented"
except KeyError:
print "The app doesn't have HSTS headers"
#perform sslscan for the url using sslscan and output it to a file.
def sslscan(url, output_file):
pass
#check if server version is disclosed in headers
def info_disclosure(url):
pass
|
Check response headers for clickjacking and HSTS headersimport requests
#checking if the app is vulnerable to clickjacking
def check_clickjacking(url):
response = requests.get(url, verify=False)
try:
response_check_clickjacking = response.headers['x-frame-options']
if response_check_clickjacking == "DENY" or \
response_check_clickjacking == "SAMEORIGIN":
print "The app is not vulnerable to clickjacking"
else:
print response_check_clickjacking
except KeyError:
print "The app is vulnerable to clickjacking"
#checking if HSTS is implemented
def check_hsts(url):
response = requests.get(url, verify=False)
try:
response_check_hsts = response.headers['strict-transport-security']
if response_check_hsts:
print "The app has hsts implemented"
except KeyError:
print "The app doesn't have HSTS headers"
#perform sslscan for the url using sslscan and output it to a file.
def sslscan(url, output_file):
pass
#check if server version is disclosed in headers
def info_disclosure(url):
pass
|
<commit_before><commit_msg>Check response headers for clickjacking and HSTS headers<commit_after>import requests
#checking if the app is vulnerable to clickjacking
def check_clickjacking(url):
response = requests.get(url, verify=False)
try:
response_check_clickjacking = response.headers['x-frame-options']
if response_check_clickjacking == "DENY" or \
response_check_clickjacking == "SAMEORIGIN":
print "The app is not vulnerable to clickjacking"
else:
print response_check_clickjacking
except KeyError:
print "The app is vulnerable to clickjacking"
#checking if HSTS is implemented
def check_hsts(url):
response = requests.get(url, verify=False)
try:
response_check_hsts = response.headers['strict-transport-security']
if response_check_hsts:
print "The app has hsts implemented"
except KeyError:
print "The app doesn't have HSTS headers"
#perform sslscan for the url using sslscan and output it to a file.
def sslscan(url, output_file):
pass
#check if server version is disclosed in headers
def info_disclosure(url):
pass
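One possible tightening of the header checks above: requests exposes response headers as a case-insensitive mapping with .get(), which avoids the try/except around missing keys. A sketch of check_hsts rewritten that way (same intended behaviour; print kept parenthesised so it works on Python 2 and 3):

import requests

def check_hsts(url):
    response = requests.get(url, verify=False)
    # response.headers is case-insensitive, so 'Strict-Transport-Security' also matches
    if response.headers.get('strict-transport-security'):
        print("The app has hsts implemented")
    else:
        print("The app doesn't have HSTS headers")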
|
|
407d7e5227716d80d1e90fcd7e763a112ff100d6
|
services/netflix.py
|
services/netflix.py
|
import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
|
Add Netflix back in ... maybe
|
Add Netflix back in ... maybe
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
Add Netflix back in ... maybe
|
import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
|
<commit_before><commit_msg>Add Netflix back in ... maybe<commit_after>
|
import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
|
Add Netflix back in ... maybeimport foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
|
<commit_before><commit_msg>Add Netflix back in ... maybe<commit_after>import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
|
|
c30a9b7a9d4558e66704af9892b8a8d327175a56
|
tools/perf/profile_creators/small_profile_creator.py
|
tools/perf/profile_creators/small_profile_creator.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25PageSet()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
|
Fix profile generation after r275633.
|
[Telemetry] Fix profile generation after r275633.
TBR=dtu@chromium.org
NOTRY=True
BUG=
Review URL: https://codereview.chromium.org/323703003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275689 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,ltilve/chromium,Just-D/chromium-1,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,ondra-novak/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,Just-D/chromium-1,jaruba/chromium.src,dednal/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Jonekee/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,ondra-novak/chromium.src,dednal/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswa
lk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,littlstar/chromium.src,ltilve/chromium,dushu1203/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,dednal/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,littlstar/chromium.src,Fireblend/chromium-crosswalk
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
[Telemetry] Fix profile generation after r275633.
TBR=dtu@chromium.org
NOTRY=True
BUG=
Review URL: https://codereview.chromium.org/323703003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275689 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25PageSet()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
<commit_msg>[Telemetry] Fix profile generation after r275633.
TBR=dtu@chromium.org
NOTRY=True
BUG=
Review URL: https://codereview.chromium.org/323703003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275689 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25PageSet()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
[Telemetry] Fix profile generation after r275633.
TBR=dtu@chromium.org
NOTRY=True
BUG=
Review URL: https://codereview.chromium.org/323703003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275689 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25PageSet()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
<commit_msg>[Telemetry] Fix profile generation after r275633.
TBR=dtu@chromium.org
NOTRY=True
BUG=
Review URL: https://codereview.chromium.org/323703003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@275689 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import profile_creator
import page_sets
class SmallProfileCreator(profile_creator.ProfileCreator):
"""
Runs a browser through a series of operations to fill in a small test profile.
"""
def __init__(self):
super(SmallProfileCreator, self).__init__()
self._page_set = page_sets.Typical25PageSet()
# Open all links in the same tab save for the last _NUM_TABS links which
# are each opened in a new tab.
self._NUM_TABS = 5
def TabForPage(self, page, browser):
idx = page.page_set.pages.index(page)
# The last _NUM_TABS pages open a new tab.
if idx <= (len(page.page_set.pages) - self._NUM_TABS):
return browser.tabs[0]
else:
return browser.tabs.New()
def MeasurePage(self, _, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
|
92fb1bd323e69a625e38d48bb293e72787d84808
|
test/test_commands.py
|
test/test_commands.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test checks the commands
"""
from __future__ import print_function
import os
import time
import shlex
import subprocess
import copy
import requests
import unittest2
class TestGroups(unittest2.TestCase):
"""
This class test hostgroups and tree feature
"""
@classmethod
def setUpClass(cls):
"""
This method:
* deletes mongodb database
* starts the backend with uwsgi
* logs in the backend and get the token
* gets the hostgroup
:return: None
"""
# Set test mode for Alignak backend
os.environ['TEST_ALIGNAK_BACKEND'] = '1'
os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'] = 'alignak-backend-test'
# Delete used mongo DBs
exit_code = subprocess.call(
shlex.split(
'mongo %s --eval "db.dropDatabase()"' % os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'])
)
assert exit_code == 0
cls.p = subprocess.Popen(['uwsgi', '--plugin', 'python', '-w', 'alignakbackend:app',
'--socket', '0.0.0.0:5000',
'--protocol=http', '--enable-threads', '--pidfile',
'/tmp/uwsgi.pid'])
time.sleep(3)
cls.endpoint = 'http://127.0.0.1:5000'
headers = {'Content-Type': 'application/json'}
params = {'username': 'admin', 'password': 'admin', 'action': 'generate'}
# get token
response = requests.post(cls.endpoint + '/login', json=params, headers=headers)
resp = response.json()
cls.token = resp['token']
cls.auth = requests.auth.HTTPBasicAuth(cls.token, '')
@classmethod
def tearDownClass(cls):
"""
Kill uwsgi
:return: None
"""
subprocess.call(['uwsgi', '--stop', '/tmp/uwsgi.pid'])
time.sleep(2)
@classmethod
def tearDown(cls):
"""
Delete resources in backend
:return: None
"""
for resource in ['host', 'service', 'command', 'livestate', 'livesynthesis']:
requests.delete(cls.endpoint + '/' + resource, auth=cls.auth)
def test_default_commands(self):
"""
        Default commands exist.
:return:
"""
# get commands
response = requests.get(self.endpoint + '/command', auth=self.auth)
resp = response.json()
self.assertEqual(len(resp['_items']), 2)
for item in resp['_items']:
self.assertIn(item['name'], ['_echo', '_internal_host_up'])
|
Add test for default commands
|
Add test for default commands
|
Python
|
agpl-3.0
|
Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend
|
Add test for default commands
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test checks the commands
"""
from __future__ import print_function
import os
import time
import shlex
import subprocess
import copy
import requests
import unittest2
class TestGroups(unittest2.TestCase):
"""
This class test hostgroups and tree feature
"""
@classmethod
def setUpClass(cls):
"""
This method:
* deletes mongodb database
* starts the backend with uwsgi
* logs in the backend and get the token
* gets the hostgroup
:return: None
"""
# Set test mode for Alignak backend
os.environ['TEST_ALIGNAK_BACKEND'] = '1'
os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'] = 'alignak-backend-test'
# Delete used mongo DBs
exit_code = subprocess.call(
shlex.split(
'mongo %s --eval "db.dropDatabase()"' % os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'])
)
assert exit_code == 0
cls.p = subprocess.Popen(['uwsgi', '--plugin', 'python', '-w', 'alignakbackend:app',
'--socket', '0.0.0.0:5000',
'--protocol=http', '--enable-threads', '--pidfile',
'/tmp/uwsgi.pid'])
time.sleep(3)
cls.endpoint = 'http://127.0.0.1:5000'
headers = {'Content-Type': 'application/json'}
params = {'username': 'admin', 'password': 'admin', 'action': 'generate'}
# get token
response = requests.post(cls.endpoint + '/login', json=params, headers=headers)
resp = response.json()
cls.token = resp['token']
cls.auth = requests.auth.HTTPBasicAuth(cls.token, '')
@classmethod
def tearDownClass(cls):
"""
Kill uwsgi
:return: None
"""
subprocess.call(['uwsgi', '--stop', '/tmp/uwsgi.pid'])
time.sleep(2)
@classmethod
def tearDown(cls):
"""
Delete resources in backend
:return: None
"""
for resource in ['host', 'service', 'command', 'livestate', 'livesynthesis']:
requests.delete(cls.endpoint + '/' + resource, auth=cls.auth)
def test_default_commands(self):
"""
        Default commands exist.
:return:
"""
# get commands
response = requests.get(self.endpoint + '/command', auth=self.auth)
resp = response.json()
self.assertEqual(len(resp['_items']), 2)
for item in resp['_items']:
self.assertIn(item['name'], ['_echo', '_internal_host_up'])
|
<commit_before><commit_msg>Add test for default commands<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test checks the commands
"""
from __future__ import print_function
import os
import time
import shlex
import subprocess
import copy
import requests
import unittest2
class TestGroups(unittest2.TestCase):
"""
This class test hostgroups and tree feature
"""
@classmethod
def setUpClass(cls):
"""
This method:
* deletes mongodb database
* starts the backend with uwsgi
* logs in the backend and get the token
* gets the hostgroup
:return: None
"""
# Set test mode for Alignak backend
os.environ['TEST_ALIGNAK_BACKEND'] = '1'
os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'] = 'alignak-backend-test'
# Delete used mongo DBs
exit_code = subprocess.call(
shlex.split(
'mongo %s --eval "db.dropDatabase()"' % os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'])
)
assert exit_code == 0
cls.p = subprocess.Popen(['uwsgi', '--plugin', 'python', '-w', 'alignakbackend:app',
'--socket', '0.0.0.0:5000',
'--protocol=http', '--enable-threads', '--pidfile',
'/tmp/uwsgi.pid'])
time.sleep(3)
cls.endpoint = 'http://127.0.0.1:5000'
headers = {'Content-Type': 'application/json'}
params = {'username': 'admin', 'password': 'admin', 'action': 'generate'}
# get token
response = requests.post(cls.endpoint + '/login', json=params, headers=headers)
resp = response.json()
cls.token = resp['token']
cls.auth = requests.auth.HTTPBasicAuth(cls.token, '')
@classmethod
def tearDownClass(cls):
"""
Kill uwsgi
:return: None
"""
subprocess.call(['uwsgi', '--stop', '/tmp/uwsgi.pid'])
time.sleep(2)
@classmethod
def tearDown(cls):
"""
Delete resources in backend
:return: None
"""
for resource in ['host', 'service', 'command', 'livestate', 'livesynthesis']:
requests.delete(cls.endpoint + '/' + resource, auth=cls.auth)
def test_default_commands(self):
"""
        Default commands exist.
:return:
"""
# get commands
response = requests.get(self.endpoint + '/command', auth=self.auth)
resp = response.json()
self.assertEqual(len(resp['_items']), 2)
for item in resp['_items']:
self.assertIn(item['name'], ['_echo', '_internal_host_up'])
|
Add test for default commands#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test checks the commands
"""
from __future__ import print_function
import os
import time
import shlex
import subprocess
import copy
import requests
import unittest2
class TestGroups(unittest2.TestCase):
"""
This class test hostgroups and tree feature
"""
@classmethod
def setUpClass(cls):
"""
This method:
* deletes mongodb database
* starts the backend with uwsgi
* logs in the backend and get the token
* gets the hostgroup
:return: None
"""
# Set test mode for Alignak backend
os.environ['TEST_ALIGNAK_BACKEND'] = '1'
os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'] = 'alignak-backend-test'
# Delete used mongo DBs
exit_code = subprocess.call(
shlex.split(
'mongo %s --eval "db.dropDatabase()"' % os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'])
)
assert exit_code == 0
cls.p = subprocess.Popen(['uwsgi', '--plugin', 'python', '-w', 'alignakbackend:app',
'--socket', '0.0.0.0:5000',
'--protocol=http', '--enable-threads', '--pidfile',
'/tmp/uwsgi.pid'])
time.sleep(3)
cls.endpoint = 'http://127.0.0.1:5000'
headers = {'Content-Type': 'application/json'}
params = {'username': 'admin', 'password': 'admin', 'action': 'generate'}
# get token
response = requests.post(cls.endpoint + '/login', json=params, headers=headers)
resp = response.json()
cls.token = resp['token']
cls.auth = requests.auth.HTTPBasicAuth(cls.token, '')
@classmethod
def tearDownClass(cls):
"""
Kill uwsgi
:return: None
"""
subprocess.call(['uwsgi', '--stop', '/tmp/uwsgi.pid'])
time.sleep(2)
@classmethod
def tearDown(cls):
"""
Delete resources in backend
:return: None
"""
for resource in ['host', 'service', 'command', 'livestate', 'livesynthesis']:
requests.delete(cls.endpoint + '/' + resource, auth=cls.auth)
def test_default_commands(self):
"""
        Default commands exist.
:return:
"""
# get commands
response = requests.get(self.endpoint + '/command', auth=self.auth)
resp = response.json()
self.assertEqual(len(resp['_items']), 2)
for item in resp['_items']:
self.assertIn(item['name'], ['_echo', '_internal_host_up'])
|
<commit_before><commit_msg>Add test for default commands<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test checks the commands
"""
from __future__ import print_function
import os
import time
import shlex
import subprocess
import copy
import requests
import unittest2
class TestGroups(unittest2.TestCase):
"""
This class test hostgroups and tree feature
"""
@classmethod
def setUpClass(cls):
"""
This method:
* deletes mongodb database
* starts the backend with uwsgi
* logs in the backend and get the token
* gets the hostgroup
:return: None
"""
# Set test mode for Alignak backend
os.environ['TEST_ALIGNAK_BACKEND'] = '1'
os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'] = 'alignak-backend-test'
# Delete used mongo DBs
exit_code = subprocess.call(
shlex.split(
'mongo %s --eval "db.dropDatabase()"' % os.environ['ALIGNAK_BACKEND_MONGO_DBNAME'])
)
assert exit_code == 0
cls.p = subprocess.Popen(['uwsgi', '--plugin', 'python', '-w', 'alignakbackend:app',
'--socket', '0.0.0.0:5000',
'--protocol=http', '--enable-threads', '--pidfile',
'/tmp/uwsgi.pid'])
time.sleep(3)
cls.endpoint = 'http://127.0.0.1:5000'
headers = {'Content-Type': 'application/json'}
params = {'username': 'admin', 'password': 'admin', 'action': 'generate'}
# get token
response = requests.post(cls.endpoint + '/login', json=params, headers=headers)
resp = response.json()
cls.token = resp['token']
cls.auth = requests.auth.HTTPBasicAuth(cls.token, '')
@classmethod
def tearDownClass(cls):
"""
Kill uwsgi
:return: None
"""
subprocess.call(['uwsgi', '--stop', '/tmp/uwsgi.pid'])
time.sleep(2)
@classmethod
def tearDown(cls):
"""
Delete resources in backend
:return: None
"""
for resource in ['host', 'service', 'command', 'livestate', 'livesynthesis']:
requests.delete(cls.endpoint + '/' + resource, auth=cls.auth)
def test_default_commands(self):
"""
        Default commands exist.
:return:
"""
# get commands
response = requests.get(self.endpoint + '/command', auth=self.auth)
resp = response.json()
self.assertEqual(len(resp['_items']), 2)
for item in resp['_items']:
self.assertIn(item['name'], ['_echo', '_internal_host_up'])
|
|
eac77b1b290ec7f7fb9e90640a3bce0510f2717b
|
tests/test_confusion.py
|
tests/test_confusion.py
|
from test_util import *
from funkyyak import grad
## See Siskind & Pearlmutter (2008), "Nesting forward-mode AD in a
## functional framework", Higher Order and Symbolic Computation
## 21(4):361-76, doi:10.1007/s10990-008-9037-1
def test_nest_hosc():
shouldBeTwo = grad (lambda x: grad (lambda y: x*y) (2.0)) (1.0)
check_equivalent(shouldBeTwo, 2.0)
|
Test for perturbation confusion with nesting
|
Test for perturbation confusion with nesting
|
Python
|
mit
|
barak/autograd
|
Test for perturbation confusion with nesting
|
from test_util import *
from funkyyak import grad
## See Siskind & Pearlmutter (2008), "Nesting forward-mode AD in a
## functional framework", Higher Order and Symbolic Computation
## 21(4):361-76, doi:10.1007/s10990-008-9037-1
def test_nest_hosc():
shouldBeTwo = grad (lambda x: grad (lambda y: x*y) (2.0)) (1.0)
check_equivalent(shouldBeTwo, 2.0)
|
<commit_before><commit_msg>Test for perturbation confusion with nesting<commit_after>
|
from test_util import *
from funkyyak import grad
## See Siskind & Pearlmutter (2008), "Nesting forward-mode AD in a
## functional framework", Higher Order and Symbolic Computation
## 21(4):361-76, doi:10.1007/s10990-008-9037-1
def test_nest_hosc():
shouldBeTwo = grad (lambda x: grad (lambda y: x*y) (2.0)) (1.0)
check_equivalent(shouldBeTwo, 2.0)
|
Test for perturbation confusion with nestingfrom test_util import *
from funkyyak import grad
## See Siskind & Pearlmutter (2008), "Nesting forward-mode AD in a
## functional framework", Higher Order and Symbolic Computation
## 21(4):361-76, doi:10.1007/s10990-008-9037-1
def test_nest_hosc():
shouldBeTwo = grad (lambda x: grad (lambda y: x*y) (2.0)) (1.0)
check_equivalent(shouldBeTwo, 2.0)
|
<commit_before><commit_msg>Test for perturbation confusion with nesting<commit_after>from test_util import *
from funkyyak import grad
## See Siskind & Pearlmutter (2008), "Nesting forward-mode AD in a
## functional framework", Higher Order and Symbolic Computation
## 21(4):361-76, doi:10.1007/s10990-008-9037-1
def test_nest_hosc():
shouldBeTwo = grad (lambda x: grad (lambda y: x*y) (2.0)) (1.0)
check_equivalent(shouldBeTwo, 2.0)
|
|
48baae8e7d1abec1f3949097df3263e64b1e6c8f
|
tests/test_receivers.py
|
tests/test_receivers.py
|
"""Tests for the Socket Receiver"""
from unittest.mock import patch
from nose.tools import raises, eq_, ok_
import multilog.receivers as receivers
from multilog.handlers import LogHandler
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_defaults(tcp_init_mock):
receiver = receivers.LogReceiver()
tcp_init_mock.assert_called_with(receiver, (receivers.DEFAULT_HOST, receivers.DEFAULT_PORT), LogHandler)
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_params(tcp_init_mock):
receiver = receivers.LogReceiver(host="HOST", port=1313, handler="HANDLER")
tcp_init_mock.assert_called_with(receiver, ("HOST", 1313), "HANDLER")
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("select.select")
@patch("multilog.receivers.LogReceiver.handle_request")
def test_serve_until_stopped(handle_request, select_mock):
receiver = receivers.LogReceiver()
def abort_select(*args):
receiver.abort = 1
return ([True], None, None)
select_mock.side_effect = abort_select
receiver.serve_until_stopped()
ok_(select_mock.called)
handle_request.assert_called_with()
eq_(receiver.abort, 1)
|
Add unit tests for the log receivers.
|
Add unit tests for the log receivers.
|
Python
|
mit
|
humangeo/multilog
|
Add unit tests for the log receivers.
|
"""Tests for the Socket Receiver"""
from unittest.mock import patch
from nose.tools import raises, eq_, ok_
import multilog.receivers as receivers
from multilog.handlers import LogHandler
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_defaults(tcp_init_mock):
receiver = receivers.LogReceiver()
tcp_init_mock.assert_called_with(receiver, (receivers.DEFAULT_HOST, receivers.DEFAULT_PORT), LogHandler)
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_params(tcp_init_mock):
receiver = receivers.LogReceiver(host="HOST", port=1313, handler="HANDLER")
tcp_init_mock.assert_called_with(receiver, ("HOST", 1313), "HANDLER")
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("select.select")
@patch("multilog.receivers.LogReceiver.handle_request")
def test_serve_until_stopped(handle_request, select_mock):
receiver = receivers.LogReceiver()
def abort_select(*args):
receiver.abort = 1
return ([True], None, None)
select_mock.side_effect = abort_select
receiver.serve_until_stopped()
ok_(select_mock.called)
handle_request.assert_called_with()
eq_(receiver.abort, 1)
|
<commit_before><commit_msg>Add unit tests for the log receivers.<commit_after>
|
"""Tests for the Socket Receiver"""
from unittest.mock import patch
from nose.tools import raises, eq_, ok_
import multilog.receivers as receivers
from multilog.handlers import LogHandler
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_defaults(tcp_init_mock):
receiver = receivers.LogReceiver()
tcp_init_mock.assert_called_with(receiver, (receivers.DEFAULT_HOST, receivers.DEFAULT_PORT), LogHandler)
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_params(tcp_init_mock):
receiver = receivers.LogReceiver(host="HOST", port=1313, handler="HANDLER")
tcp_init_mock.assert_called_with(receiver, ("HOST", 1313), "HANDLER")
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("select.select")
@patch("multilog.receivers.LogReceiver.handle_request")
def test_serve_until_stopped(handle_request, select_mock):
receiver = receivers.LogReceiver()
def abort_select(*args):
receiver.abort = 1
return ([True], None, None)
select_mock.side_effect = abort_select
receiver.serve_until_stopped()
ok_(select_mock.called)
handle_request.assert_called_with()
eq_(receiver.abort, 1)
|
Add unit tests for the log receivers."""Tests for the Socket Receiver"""
from unittest.mock import patch
from nose.tools import raises, eq_, ok_
import multilog.receivers as receivers
from multilog.handlers import LogHandler
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_defaults(tcp_init_mock):
receiver = receivers.LogReceiver()
tcp_init_mock.assert_called_with(receiver, (receivers.DEFAULT_HOST, receivers.DEFAULT_PORT), LogHandler)
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_params(tcp_init_mock):
receiver = receivers.LogReceiver(host="HOST", port=1313, handler="HANDLER")
tcp_init_mock.assert_called_with(receiver, ("HOST", 1313), "HANDLER")
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("select.select")
@patch("multilog.receivers.LogReceiver.handle_request")
def test_serve_until_stopped(handle_request, select_mock):
receiver = receivers.LogReceiver()
def abort_select(*args):
receiver.abort = 1
return ([True], None, None)
select_mock.side_effect = abort_select
receiver.serve_until_stopped()
ok_(select_mock.called)
handle_request.assert_called_with()
eq_(receiver.abort, 1)
|
<commit_before><commit_msg>Add unit tests for the log receivers.<commit_after>"""Tests for the Socket Receiver"""
from unittest.mock import patch
from nose.tools import raises, eq_, ok_
import multilog.receivers as receivers
from multilog.handlers import LogHandler
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_defaults(tcp_init_mock):
receiver = receivers.LogReceiver()
tcp_init_mock.assert_called_with(receiver, (receivers.DEFAULT_HOST, receivers.DEFAULT_PORT), LogHandler)
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("socketserver.ThreadingTCPServer.__init__")
def test_receiver_init_params(tcp_init_mock):
receiver = receivers.LogReceiver(host="HOST", port=1313, handler="HANDLER")
tcp_init_mock.assert_called_with(receiver, ("HOST", 1313), "HANDLER")
eq_(receiver.abort, 0)
eq_(receiver.timeout, 1)
eq_(receiver.logname, None)
@patch("select.select")
@patch("multilog.receivers.LogReceiver.handle_request")
def test_serve_until_stopped(handle_request, select_mock):
receiver = receivers.LogReceiver()
def abort_select(*args):
receiver.abort = 1
return ([True], None, None)
select_mock.side_effect = abort_select
receiver.serve_until_stopped()
ok_(select_mock.called)
handle_request.assert_called_with()
eq_(receiver.abort, 1)
|
|
d97e37ed121868249c2e11a9ee31dda1c603552e
|
compare-miso.py
|
compare-miso.py
|
"""
This script performs all pairwise comparisons of each class of events
called by MISO. It assumes MISO has already been run on each sample
and there is a directory structure of:
miso_dir/control-RI
miso_dir/knockdown-RI
where before the - is the samplename and after the - is the event type.
It then calculates all pairwise comparisons of samples for each event type.
"""
import fnmatch
import os
import logging
import subprocess
from argparse import ArgumentParser
from itertools import combinations
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s %(message)s',
datefmt='%m-%d-%y %H:%M:%S')
logger = logging.getLogger("miso-compare")
EVENTTYPES = ["A3SS", "A5SS", "MXE", "RI", "SE"]
def is_misodir(dirname):
for etype in EVENTTYPES:
if dirname.endswith("-" + etype):
return True
return False
def drop_after_last(string, dropafter):
"""drop everything after the last match of dropafter"""
tokens = string.split(dropafter)[:-1]
return(dropafter.join(tokens))
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--outdir", default="comparisons")
parser.add_argument("misodir", help="Toplevel MISO results directory.")
args = parser.parse_args()
misodirs = [x for x in os.listdir(args.misodir) if is_misodir(x)]
samples = set([drop_after_last(x, "-") for x in misodirs])
typestorun = set([x.split("-")[-1] for x in misodirs])
pairs = list(combinations(samples, 2))
miso_cmd = "compare_miso --compare-samples {s1} {s2} {args.outdir}"
for etype in typestorun:
for pair in pairs:
s1 = pair[0] + "-" + etype
s2 = pair[1] + "-" + etype
cmd = miso_cmd.format(**locals())
logger.info("Comparing %s and %s." %(s1, s2))
subprocess.check_call(cmd, shell=True)
|
Add script to run all pairwise MISO comparisons for each event type.
|
Add script to run all pairwise MISO comparisons for each event type.
|
Python
|
mit
|
roryk/junkdrawer,roryk/junkdrawer
|
Add script to run all pairwise MISO comparisons for each event type.
|
"""
this script is to do all pairwise comparisons of each class of events
called by MISO. It assumes MISO has already been run on each sample
and there is a directory structure of:
miso_dir/control-RI
miso_dir/knockdown-RI
where before the - is the samplename and after the - is the event type.
It then calculates all pairwise comparisons of samples for each event type.
"""
import fnmatch
import os
import logging
import subprocess
from argparse import ArgumentParser
from itertools import combinations
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s %(message)s',
datefmt='%m-%d-%y %H:%M:%S')
logger = logging.getLogger("miso-compare")
EVENTTYPES = ["A3SS", "A5SS", "MXE", "RI", "SE"]
def is_misodir(dirname):
for etype in EVENTTYPES:
if dirname.endswith("-" + etype):
return True
return False
def drop_after_last(string, dropafter):
"""drop everything after the last match of dropafter"""
tokens = string.split(dropafter)[:-1]
return(dropafter.join(tokens))
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--outdir", default="comparisons")
parser.add_argument("misodir", help="Toplevel MISO results directory.")
args = parser.parse_args()
misodirs = [x for x in os.listdir(args.misodir) if is_misodir(x)]
samples = set([drop_after_last(x, "-") for x in misodirs])
typestorun = set([x.split("-")[-1] for x in misodirs])
pairs = list(combinations(samples, 2))
miso_cmd = "compare_miso --compare-samples {s1} {s2} {args.outdir}"
for etype in typestorun:
for pair in pairs:
s1 = pair[0] + "-" + etype
s2 = pair[1] + "-" + etype
cmd = miso_cmd.format(**locals())
logger.info("Comparing %s and %s." %(s1, s2))
subprocess.check_call(cmd, shell=True)
|
<commit_before><commit_msg>Add script to run all pairwise MISO comparisons for each event type.<commit_after>
|
"""
this script is to do all pairwise comparisons of each class of events
called by MISO. It assumes MISO has already been run on each sample
and there is a directory structure of:
miso_dir/control-RI
miso_dir/knockdown-RI
where before the - is the samplename and after the - is the event type.
It then calculates all pairwise comparisons of samples for each event type.
"""
import fnmatch
import os
import logging
import subprocess
from argparse import ArgumentParser
from itertools import combinations
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s %(message)s',
datefmt='%m-%d-%y %H:%M:%S')
logger = logging.getLogger("miso-compare")
EVENTTYPES = ["A3SS", "A5SS", "MXE", "RI", "SE"]
def is_misodir(dirname):
for etype in EVENTTYPES:
if dirname.endswith("-" + etype):
return True
return False
def drop_after_last(string, dropafter):
"""drop everything after the last match of dropafter"""
tokens = string.split(dropafter)[:-1]
return(dropafter.join(tokens))
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--outdir", default="comparisons")
parser.add_argument("misodir", help="Toplevel MISO results directory.")
args = parser.parse_args()
misodirs = [x for x in os.listdir(args.misodir) if is_misodir(x)]
samples = set([drop_after_last(x, "-") for x in misodirs])
typestorun = set([x.split("-")[-1] for x in misodirs])
pairs = list(combinations(samples, 2))
miso_cmd = "compare_miso --compare-samples {s1} {s2} {args.outdir}"
for etype in typestorun:
for pair in pairs:
s1 = pair[0] + "-" + etype
s2 = pair[1] + "-" + etype
cmd = miso_cmd.format(**locals())
logger.info("Comparing %s and %s." %(s1, s2))
subprocess.check_call(cmd, shell=True)
|
Add script to run all pairwise MISO comparisons for each event type."""
this script is to do all pairwise comparisons of each class of events
called by MISO. It assumes MISO has already been run on each sample
and there is a directory structure of:
miso_dir/control-RI
miso_dir/knockdown-RI
where before the - is the samplename and after the - is the event type.
It then calculates all pairwise comparisons of samples for each event type.
"""
import fnmatch
import os
import logging
import subprocess
from argparse import ArgumentParser
from itertools import combinations
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s %(message)s',
datefmt='%m-%d-%y %H:%M:%S')
logger = logging.getLogger("miso-compare")
EVENTTYPES = ["A3SS", "A5SS", "MXE", "RI", "SE"]
def is_misodir(dirname):
for etype in EVENTTYPES:
if dirname.endswith("-" + etype):
return True
return False
def drop_after_last(string, dropafter):
"""drop everything after the last match of dropafter"""
tokens = string.split(dropafter)[:-1]
return(dropafter.join(tokens))
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--outdir", default="comparisons")
parser.add_argument("misodir", help="Toplevel MISO results directory.")
args = parser.parse_args()
misodirs = [x for x in os.listdir(args.misodir) if is_misodir(x)]
samples = set([drop_after_last(x, "-") for x in misodirs])
typestorun = set([x.split("-")[-1] for x in misodirs])
pairs = list(combinations(samples, 2))
miso_cmd = "compare_miso --compare-samples {s1} {s2} {args.outdir}"
for etype in typestorun:
for pair in pairs:
s1 = pair[0] + "-" + etype
s2 = pair[1] + "-" + etype
cmd = miso_cmd.format(**locals())
logger.info("Comparing %s and %s." %(s1, s2))
subprocess.check_call(cmd, shell=True)
|
<commit_before><commit_msg>Add script to run all pairwise MISO comparisons for each event type.<commit_after>"""
this script is to do all pairwise comparisons of each class of events
called by MISO. It assumes MISO has already been run on each sample
and there is a directory structure of:
miso_dir/control-RI
miso_dir/knockdown-RI
where before the - is the samplename and after the - is the event type.
It then calculates all pairwise comparisons of samples for each event type.
"""
import fnmatch
import os
import logging
import subprocess
from argparse import ArgumentParser
from itertools import combinations
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)s %(levelname)s %(message)s',
datefmt='%m-%d-%y %H:%M:%S')
logger = logging.getLogger("miso-compare")
EVENTTYPES = ["A3SS", "A5SS", "MXE", "RI", "SE"]
def is_misodir(dirname):
for etype in EVENTTYPES:
if dirname.endswith("-" + etype):
return True
return False
def drop_after_last(string, dropafter):
"""drop everything after the last match of dropafter"""
tokens = string.split(dropafter)[:-1]
return(dropafter.join(tokens))
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("--outdir", default="comparisons")
parser.add_argument("misodir", help="Toplevel MISO results directory.")
args = parser.parse_args()
misodirs = [x for x in os.listdir(args.misodir) if is_misodir(x)]
samples = set([drop_after_last(x, "-") for x in misodirs])
typestorun = set([x.split("-")[-1] for x in misodirs])
pairs = list(combinations(samples, 2))
miso_cmd = "compare_miso --compare-samples {s1} {s2} {args.outdir}"
for etype in typestorun:
for pair in pairs:
s1 = pair[0] + "-" + etype
s2 = pair[1] + "-" + etype
cmd = miso_cmd.format(**locals())
logger.info("Comparing %s and %s." %(s1, s2))
subprocess.check_call(cmd, shell=True)
|
|
345581aa5ba2fe3b0e4288f47489b668cebfc162
|
tests/cli/test_repair.py
|
tests/cli/test_repair.py
|
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
|
from textwrap import dedent
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
def test_full(tmpdir, runner):
runner.write_with_general(dedent('''
[storage foo]
type = filesystem
path = {0}/foo/
fileext = .txt
''').format(str(tmpdir)))
foo = tmpdir.mkdir('foo')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
foo.join('item.txt').write('BEGIN:VCARD\nEND:VCARD')
foo.join('toobroken.txt').write('')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
assert 'No UID' in result.output
assert 'warning: Item toobroken.txt can\'t be parsed, skipping' \
in result.output
assert 'UID:' in foo.join('item.txt').read()
|
Add another test for full repair command
|
Add another test for full repair command
|
Python
|
mit
|
tribut/vdirsyncer,credativUK/vdirsyncer,untitaker/vdirsyncer,hobarrera/vdirsyncer,hobarrera/vdirsyncer,tribut/vdirsyncer,untitaker/vdirsyncer,credativUK/vdirsyncer,untitaker/vdirsyncer
|
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
Add another test for full repair command
|
from textwrap import dedent
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
def test_full(tmpdir, runner):
runner.write_with_general(dedent('''
[storage foo]
type = filesystem
path = {0}/foo/
fileext = .txt
''').format(str(tmpdir)))
foo = tmpdir.mkdir('foo')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
foo.join('item.txt').write('BEGIN:VCARD\nEND:VCARD')
foo.join('toobroken.txt').write('')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
assert 'No UID' in result.output
assert 'warning: Item toobroken.txt can\'t be parsed, skipping' \
in result.output
assert 'UID:' in foo.join('item.txt').read()
|
<commit_before>from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
<commit_msg>Add another test for full repair command<commit_after>
|
from textwrap import dedent
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
def test_full(tmpdir, runner):
runner.write_with_general(dedent('''
[storage foo]
type = filesystem
path = {0}/foo/
fileext = .txt
''').format(str(tmpdir)))
foo = tmpdir.mkdir('foo')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
foo.join('item.txt').write('BEGIN:VCARD\nEND:VCARD')
foo.join('toobroken.txt').write('')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
assert 'No UID' in result.output
assert 'warning: Item toobroken.txt can\'t be parsed, skipping' \
in result.output
assert 'UID:' in foo.join('item.txt').read()
|
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
Add another test for full repair commandfrom textwrap import dedent
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
def test_full(tmpdir, runner):
runner.write_with_general(dedent('''
[storage foo]
type = filesystem
path = {0}/foo/
fileext = .txt
''').format(str(tmpdir)))
foo = tmpdir.mkdir('foo')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
foo.join('item.txt').write('BEGIN:VCARD\nEND:VCARD')
foo.join('toobroken.txt').write('')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
assert 'No UID' in result.output
assert 'warning: Item toobroken.txt can\'t be parsed, skipping' \
in result.output
assert 'UID:' in foo.join('item.txt').read()
|
<commit_before>from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
<commit_msg>Add another test for full repair command<commit_after>from textwrap import dedent
from vdirsyncer.cli.utils import repair_storage
from vdirsyncer.storage.memory import MemoryStorage
from vdirsyncer.utils.vobject import Item
def test_repair_uids():
s = MemoryStorage()
s.upload(Item(u'BEGIN:VCARD\nEND:VCARD'))
repair_storage(s)
uid, = [s.get(href)[0].uid for href, etag in s.list()]
s.upload(Item(u'BEGIN:VCARD\nUID:{}\nEND:VCARD'.format(uid)))
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 == uid2
repair_storage(s)
uid1, uid2 = [s.get(href)[0].uid for href, etag in s.list()]
assert uid1 != uid2
def test_full(tmpdir, runner):
runner.write_with_general(dedent('''
[storage foo]
type = filesystem
path = {0}/foo/
fileext = .txt
''').format(str(tmpdir)))
foo = tmpdir.mkdir('foo')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
foo.join('item.txt').write('BEGIN:VCARD\nEND:VCARD')
foo.join('toobroken.txt').write('')
result = runner.invoke(['repair', 'foo'])
assert not result.exception
assert 'No UID' in result.output
assert 'warning: Item toobroken.txt can\'t be parsed, skipping' \
in result.output
assert 'UID:' in foo.join('item.txt').read()
|
389acb1972491d4ad982e3893193aaf60b0bdc0d
|
tests/test_processing.py
|
tests/test_processing.py
|
import mock
import pytest
from scrapi import events
from scrapi import settings
settings.DEBUG = False
settings.RAW_PROCESSING = ['storage', 'osf', 'foo', 'bar']
settings.NORMALIZED_PROCESSING = ['storage', 'osf', 'foo', 'bar']
from scrapi import processing
BLACKHOLE = lambda *_, **__: None
@pytest.fixture(autouse=True)
def no_events(monkeypatch):
monkeypatch.setattr('scrapi.processing.events.dispatch', BLACKHOLE)
@pytest.fixture
def get_processor(monkeypatch):
mock_get_proc = mock.MagicMock()
monkeypatch.setattr('scrapi.processing.get_processor', mock_get_proc)
return mock_get_proc
def test_normalized_calls_all(get_processor):
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all(get_processor):
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_normalized_catches(monkeypatch, get_processor):
settings.NORMALIZED_PROCESSING = ['osf']
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._normalized_event', mock_event)
processing.process_normalized(raw_mock, raw_mock, {})
mock_event.assert_called_with(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_raw_catches(monkeypatch, get_processor):
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._raw_event', mock_event)
processing.process_raw(raw_mock)
mock_event.assert_any_call(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_normalized_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_raises_on_bad_processor():
with pytest.raises(NotImplementedError):
processing.get_processor("Baby, You're never there.")
|
Add tests for the processing module
|
Add tests for the processing module
|
Python
|
apache-2.0
|
mehanig/scrapi,mehanig/scrapi,fabianvf/scrapi,felliott/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,felliott/scrapi,alexgarciac/scrapi,icereval/scrapi,ostwald/scrapi,erinspace/scrapi,jeffreyliu3230/scrapi,CenterForOpenScience/scrapi
|
Add tests for the processing module
|
import mock
import pytest
from scrapi import events
from scrapi import settings
settings.DEBUG = False
settings.RAW_PROCESSING = ['storage', 'osf', 'foo', 'bar']
settings.NORMALIZED_PROCESSING = ['storage', 'osf', 'foo', 'bar']
from scrapi import processing
BLACKHOLE = lambda *_, **__: None
@pytest.fixture(autouse=True)
def no_events(monkeypatch):
monkeypatch.setattr('scrapi.processing.events.dispatch', BLACKHOLE)
@pytest.fixture
def get_processor(monkeypatch):
mock_get_proc = mock.MagicMock()
monkeypatch.setattr('scrapi.processing.get_processor', mock_get_proc)
return mock_get_proc
def test_normalized_calls_all(get_processor):
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all(get_processor):
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_normalized_catches(monkeypatch, get_processor):
settings.NORMALIZED_PROCESSING = ['osf']
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._normalized_event', mock_event)
processing.process_normalized(raw_mock, raw_mock, {})
mock_event.assert_called_with(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_raw_catches(monkeypatch, get_processor):
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._raw_event', mock_event)
processing.process_raw(raw_mock)
mock_event.assert_any_call(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_normalized_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_raises_on_bad_processor():
with pytest.raises(NotImplementedError):
processing.get_processor("Baby, You're never there.")
|
<commit_before><commit_msg>Add tests for the processing module<commit_after>
|
import mock
import pytest
from scrapi import events
from scrapi import settings
settings.DEBUG = False
settings.RAW_PROCESSING = ['storage', 'osf', 'foo', 'bar']
settings.NORMALIZED_PROCESSING = ['storage', 'osf', 'foo', 'bar']
from scrapi import processing
BLACKHOLE = lambda *_, **__: None
@pytest.fixture(autouse=True)
def no_events(monkeypatch):
monkeypatch.setattr('scrapi.processing.events.dispatch', BLACKHOLE)
@pytest.fixture
def get_processor(monkeypatch):
mock_get_proc = mock.MagicMock()
monkeypatch.setattr('scrapi.processing.get_processor', mock_get_proc)
return mock_get_proc
def test_normalized_calls_all(get_processor):
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all(get_processor):
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_normalized_catches(monkeypatch, get_processor):
settings.NORMALIZED_PROCESSING = ['osf']
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._normalized_event', mock_event)
processing.process_normalized(raw_mock, raw_mock, {})
mock_event.assert_called_with(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_raw_catches(monkeypatch, get_processor):
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._raw_event', mock_event)
processing.process_raw(raw_mock)
mock_event.assert_any_call(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_normalized_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_raises_on_bad_processor():
with pytest.raises(NotImplementedError):
processing.get_processor("Baby, You're never there.")
|
Add tests for the processing moduleimport mock
import pytest
from scrapi import events
from scrapi import settings
settings.DEBUG = False
settings.RAW_PROCESSING = ['storage', 'osf', 'foo', 'bar']
settings.NORMALIZED_PROCESSING = ['storage', 'osf', 'foo', 'bar']
from scrapi import processing
BLACKHOLE = lambda *_, **__: None
@pytest.fixture(autouse=True)
def no_events(monkeypatch):
monkeypatch.setattr('scrapi.processing.events.dispatch', BLACKHOLE)
@pytest.fixture
def get_processor(monkeypatch):
mock_get_proc = mock.MagicMock()
monkeypatch.setattr('scrapi.processing.get_processor', mock_get_proc)
return mock_get_proc
def test_normalized_calls_all(get_processor):
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all(get_processor):
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_normalized_catches(monkeypatch, get_processor):
settings.NORMALIZED_PROCESSING = ['osf']
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._normalized_event', mock_event)
processing.process_normalized(raw_mock, raw_mock, {})
mock_event.assert_called_with(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_raw_catches(monkeypatch, get_processor):
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._raw_event', mock_event)
processing.process_raw(raw_mock)
mock_event.assert_any_call(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_normalized_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_raises_on_bad_processor():
with pytest.raises(NotImplementedError):
processing.get_processor("Baby, You're never there.")
|
<commit_before><commit_msg>Add tests for the processing module<commit_after>import mock
import pytest
from scrapi import events
from scrapi import settings
settings.DEBUG = False
settings.RAW_PROCESSING = ['storage', 'osf', 'foo', 'bar']
settings.NORMALIZED_PROCESSING = ['storage', 'osf', 'foo', 'bar']
from scrapi import processing
BLACKHOLE = lambda *_, **__: None
@pytest.fixture(autouse=True)
def no_events(monkeypatch):
monkeypatch.setattr('scrapi.processing.events.dispatch', BLACKHOLE)
@pytest.fixture
def get_processor(monkeypatch):
mock_get_proc = mock.MagicMock()
monkeypatch.setattr('scrapi.processing.get_processor', mock_get_proc)
return mock_get_proc
def test_normalized_calls_all(get_processor):
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all(get_processor):
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_normalized_catches(monkeypatch, get_processor):
settings.NORMALIZED_PROCESSING = ['osf']
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._normalized_event', mock_event)
processing.process_normalized(raw_mock, raw_mock, {})
mock_event.assert_called_with(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_raw_catches(monkeypatch, get_processor):
mock_event = mock.Mock()
raw_mock = mock.MagicMock()
get_processor.side_effect = KeyError('You raise me uuuuup')
monkeypatch.setattr('scrapi.processing._raw_event', mock_event)
processing.process_raw(raw_mock)
mock_event.assert_any_call(events.FAILED, 'osf', raw_mock, exception=repr(get_processor.side_effect))
def test_normalized_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_normalized(mock.MagicMock(), mock.MagicMock(), {})
for processor in settings.NORMALIZED_PROCESSING:
get_processor.assert_any_call(processor)
def test_raw_calls_all_throwing(get_processor):
get_processor.side_effect = lambda x: Exception('Reasons') if x == 'storage' else mock.Mock()
processing.process_raw(mock.MagicMock())
for processor in settings.RAW_PROCESSING:
get_processor.assert_any_call(processor)
def test_raises_on_bad_processor():
with pytest.raises(NotImplementedError):
processing.get_processor("Baby, You're never there.")
|
|
db7fa706fa1a5285c8fdb00de4e9661c59839035
|
samples/tests/forms/test_collected_sample_form.py
|
samples/tests/forms/test_collected_sample_form.py
|
import datetime
from django.test import TestCase
from samples.forms import CollectedSampleForm
from samples.models import (
Patient, AdmissionNote, CollectedSample, CollectionType,
)
class CollectedSampleFormTest(TestCase):
def setUp(self):
patient = Patient(name="Collected Sample Form Patient")
patient.save()
self.admission_note = AdmissionNote.objects.create(
patient=patient, id_gal_origin="1234567890")
self.collection_type = CollectionType.objects.create(
method_name="Método de coleta teste primário",
is_primary=True,
)
self.other_collection_type = CollectionType.objects.create(
method_name="Método de coleta teste secundário",
is_primary=False,
)
def test_two_selected_methods_invalid(self):
form = CollectedSampleForm({
'collection_type': self.collection_type,
'other_collection_type': self.other_collection_type,
'collection_date': "30/12/2018",
})
self.assertFalse(form.is_valid())
|
Add first test: check if both collection type selected is invalid
|
:hammer: Add first test: check if both collection type selected is invalid
|
Python
|
mit
|
gems-uff/labsys,gems-uff/labsys,gems-uff/labsys
|
:hammer: Add first test: check if both collection type selected is invalid
|
import datetime
from django.test import TestCase
from samples.forms import CollectedSampleForm
from samples.models import (
Patient, AdmissionNote, CollectedSample, CollectionType,
)
class CollectedSampleFormTest(TestCase):
def setUp(self):
patient = Patient(name="Collected Sample Form Patient")
patient.save()
self.admission_note = AdmissionNote.objects.create(
patient=patient, id_gal_origin="1234567890")
self.collection_type = CollectionType.objects.create(
method_name="Método de coleta teste primário",
is_primary=True,
)
self.other_collection_type = CollectionType.objects.create(
method_name="Método de coleta teste secundário",
is_primary=False,
)
def test_two_selected_methods_invalid(self):
form = CollectedSampleForm({
'collection_type': self.collection_type,
'other_collection_type': self.other_collection_type,
'collection_date': "30/12/2018",
})
self.assertFalse(form.is_valid())
|
<commit_before><commit_msg>:hammer: Add first test: check if both collection type selected is invalid<commit_after>
|
import datetime
from django.test import TestCase
from samples.forms import CollectedSampleForm
from samples.models import (
Patient, AdmissionNote, CollectedSample, CollectionType,
)
class CollectedSampleFormTest(TestCase):
def setUp(self):
patient = Patient(name="Collected Sample Form Patient")
patient.save()
self.admission_note = AdmissionNote.objects.create(
patient=patient, id_gal_origin="1234567890")
self.collection_type = CollectionType.objects.create(
method_name="Método de coleta teste primário",
is_primary=True,
)
self.other_collection_type = CollectionType.objects.create(
method_name="Método de coleta teste secundário",
is_primary=False,
)
def test_two_selected_methods_invalid(self):
form = CollectedSampleForm({
'collection_type': self.collection_type,
'other_collection_type': self.other_collection_type,
'collection_date': "30/12/2018",
})
self.assertFalse(form.is_valid())
|
:hammer: Add first test: check if both collection type selected is invalidimport datetime
from django.test import TestCase
from samples.forms import CollectedSampleForm
from samples.models import (
Patient, AdmissionNote, CollectedSample, CollectionType,
)
class CollectedSampleFormTest(TestCase):
def setUp(self):
patient = Patient(name="Collected Sample Form Patient")
patient.save()
self.admission_note = AdmissionNote.objects.create(
patient=patient, id_gal_origin="1234567890")
self.collection_type = CollectionType.objects.create(
method_name="Método de coleta teste primário",
is_primary=True,
)
self.other_collection_type = CollectionType.objects.create(
method_name="Método de coleta teste secundário",
is_primary=False,
)
def test_two_selected_methods_invalid(self):
form = CollectedSampleForm({
'collection_type': self.collection_type,
'other_collection_type': self.other_collection_type,
'collection_date': "30/12/2018",
})
self.assertFalse(form.is_valid())
|
<commit_before><commit_msg>:hammer: Add first test: check if both collection type selected is invalid<commit_after>import datetime
from django.test import TestCase
from samples.forms import CollectedSampleForm
from samples.models import (
Patient, AdmissionNote, CollectedSample, CollectionType,
)
class CollectedSampleFormTest(TestCase):
def setUp(self):
patient = Patient(name="Collected Sample Form Patient")
patient.save()
self.admission_note = AdmissionNote.objects.create(
patient=patient, id_gal_origin="1234567890")
self.collection_type = CollectionType.objects.create(
method_name="Método de coleta teste primário",
is_primary=True,
)
self.other_collection_type = CollectionType.objects.create(
method_name="Método de coleta teste secundário",
is_primary=False,
)
def test_two_selected_methods_invalid(self):
form = CollectedSampleForm({
'collection_type': self.collection_type,
'other_collection_type': self.other_collection_type,
'collection_date': "30/12/2018",
})
self.assertFalse(form.is_valid())
|
|
15b219033d9c452c7e2d4f75595580b52f2d945e
|
deploy/flask-redirects.py
|
deploy/flask-redirects.py
|
import json
from flask import Flask, make_response, redirect, request
app = Flask(__name__)
PRODUCTION_DOMAIN = 'readthedocs.org'
@app.route('/')
def redirect_front():
version = 'latest'
language = 'en'
single_version = False
SUBDOMAIN = CNAME = False
if PRODUCTION_DOMAIN in request.host:
SUBDOMAIN = True
slug = request.host.split('.')[0]
path = "/home/docs/checkouts/readthedocs.org/user_builds/{slug}/metadata.json".format(slug=slug)
else:
try:
cname = request.host.split(':')[0]
except:
cname = request.host
CNAME = True
path = "/home/docs/checkouts/readthedocs.org/cnametoproject/{cname}/metadata.json".format(cname=cname)
try:
json_obj = json.load(file(path))
version = json_obj['version']
language = json_obj['language']
single_version = json_obj['single_version']
except Exception, e:
print e
if single_version:
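        # Single-version projects skip the /<language>/<version>/ redirect and
        # return the docs path in the X-Send-File header for the web server to serve.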
if SUBDOMAIN:
sendfile = "/user_builds/{slug}/translations/{language}/{version}".format(slug=slug, language=language, version=version)
elif CNAME:
sendfile = "/cnametoproject/{cname}/translation/{language}/{version}/".format(cname=cname, language=language, version=version)
print "Redirecting {host} to {sendfile}".format(host=request.host, sendfile=sendfile)
return make_response('', 200, {'X-Send-File': sendfile})
else:
url = '/{language}/{version}/'.format(language=language, version=version)
print "Redirecting {host} to {url}".format(host=request.host, url=url)
return redirect(url)
if __name__ == '__main__':
app.run()
|
Add a small flask app for redirecting /.
|
Add a small flask app for redirecting /.
This should never hit the database,
and give us a nice place to make smarter redirects.
|
Python
|
mit
|
raven47git/readthedocs.org,nikolas/readthedocs.org,sunnyzwh/readthedocs.org,davidfischer/readthedocs.org,emawind84/readthedocs.org,takluyver/readthedocs.org,Tazer/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,d0ugal/readthedocs.org,hach-que/readthedocs.org,VishvajitP/readthedocs.org,kenshinthebattosai/readthedocs.org,Carreau/readthedocs.org,GovReady/readthedocs.org,soulshake/readthedocs.org,michaelmcandrew/readthedocs.org,clarkperkins/readthedocs.org,wanghaven/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,CedarLogic/readthedocs.org,kenwang76/readthedocs.org,michaelmcandrew/readthedocs.org,kdkeyser/readthedocs.org,mrshoki/readthedocs.org,fujita-shintaro/readthedocs.org,michaelmcandrew/readthedocs.org,wanghaven/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,dirn/readthedocs.org,emawind84/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,stevepiercy/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,KamranMackey/readthedocs.org,kenshinthebattosai/readthedocs.org,mrshoki/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,nikolas/readthedocs.org,VishvajitP/readthedocs.org,kenwang76/readthedocs.org,Carreau/readthedocs.org,attakei/readthedocs-oauth,sunnyzwh/readthedocs.org,dirn/readthedocs.org,d0ugal/readthedocs.org,royalwang/readthedocs.org,titiushko/readthedocs.org,mhils/readthedocs.org,hach-que/readthedocs.org,mrshoki/readthedocs.org,agjohnson/readthedocs.org,rtfd/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,SteveViss/readthedocs.org,raven47git/readthedocs.org,stevepiercy/readthedocs.org,singingwolfboy/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,wijerasa/readthedocs.org,gjtorikian/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,titiushko/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,safwanrahman/readthedocs.org,Tazer/readthedocs.org,hach-que/readthedocs.org,techtonik/readthedocs.org,sid-kap/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,LukasBoersma/readthedocs.org,gjtorikian/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,clarkperkins/readthedocs.org,emawind84/readthedocs.org,espdev/readthedocs.org,asampat3090/readthedocs.org,d0ugal/readthedocs.org,singingwolfboy/readthedocs.org,KamranMackey/readthedocs.org,nikolas/readthedocs.org,stevepiercy/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,dirn/readthedocs.org,jerel/readthedocs.org,asampat3090/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,wanghaven/readthedocs.org,pombredanne/readthedocs.org,laplaceliu/readthedocs.org,LukasBoersma/readthedocs.org,tddv/readthedocs.org,atsuyim/readthedocs.org,soulshake/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,gjtorikian/readthedocs.org,Tazer/readthedocs.org,istresearch/readthedocs.org,kenshinthebattosai/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,VishvajitP/readthedocs.org,soulshake/readthedocs.org,atsuyim/readthedocs.org,takluyver/readthe
docs.org,gjtorikian/readthedocs.org,laplaceliu/readthedocs.org,asampat3090/readthedocs.org,jerel/readthedocs.org,sid-kap/readthedocs.org,Carreau/readthedocs.org,safwanrahman/readthedocs.org,asampat3090/readthedocs.org,techtonik/readthedocs.org,soulshake/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,sils1297/readthedocs.org,safwanrahman/readthedocs.org,takluyver/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,fujita-shintaro/readthedocs.org,emawind84/readthedocs.org,LukasBoersma/readthedocs.org,titiushko/readthedocs.org,jerel/readthedocs.org,sid-kap/readthedocs.org,kdkeyser/readthedocs.org,sunnyzwh/readthedocs.org,davidfischer/readthedocs.org,kdkeyser/readthedocs.org,raven47git/readthedocs.org,SteveViss/readthedocs.org,agjohnson/readthedocs.org,CedarLogic/readthedocs.org,kenwang76/readthedocs.org,sunnyzwh/readthedocs.org,sils1297/readthedocs.org,nikolas/readthedocs.org,michaelmcandrew/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,LukasBoersma/readthedocs.org,cgourlay/readthedocs.org,kenwang76/readthedocs.org,raven47git/readthedocs.org,sils1297/readthedocs.org,KamranMackey/readthedocs.org,singingwolfboy/readthedocs.org,tddv/readthedocs.org,CedarLogic/readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,wanghaven/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,kenshinthebattosai/readthedocs.org,jerel/readthedocs.org,techtonik/readthedocs.org,wijerasa/readthedocs.org
|
Add a small flask app for redirecting /.
This should never hit the database,
and give us a nice place to make smarter redirects.
|
import json
from flask import Flask, make_response, redirect, request
app = Flask(__name__)
PRODUCTION_DOMAIN = 'readthedocs.org'
@app.route('/')
def redirect_front():
version = 'latest'
language = 'en'
single_version = False
SUBDOMAIN = CNAME = False
if PRODUCTION_DOMAIN in request.host:
SUBDOMAIN = True
slug = request.host.split('.')[0]
path = "/home/docs/checkouts/readthedocs.org/user_builds/{slug}/metadata.json".format(slug=slug)
else:
try:
cname = request.host.split(':')[0]
except:
cname = request.host
CNAME = True
path = "/home/docs/checkouts/readthedocs.org/cnametoproject/{cname}/metadata.json".format(cname=cname)
try:
json_obj = json.load(file(path))
version = json_obj['version']
language = json_obj['language']
single_version = json_obj['single_version']
except Exception, e:
print e
if single_version:
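        # Single-version projects skip the /<language>/<version>/ redirect and
        # return the docs path in the X-Send-File header for the web server to serve.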
if SUBDOMAIN:
sendfile = "/user_builds/{slug}/translations/{language}/{version}".format(slug=slug, language=language, version=version)
elif CNAME:
sendfile = "/cnametoproject/{cname}/translation/{language}/{version}/".format(cname=cname, language=language, version=version)
print "Redirecting {host} to {sendfile}".format(host=request.host, sendfile=sendfile)
return make_response('', 200, {'X-Send-File': sendfile})
else:
url = '/{language}/{version}/'.format(language=language, version=version)
print "Redirecting {host} to {url}".format(host=request.host, url=url)
return redirect(url)
if __name__ == '__main__':
app.run()
|
<commit_before><commit_msg>Add a small flask app for redirecting /.
This should never hit the database,
and give us a nice place to make smarter redirects.<commit_after>
|
import json
from flask import Flask, make_response, redirect, request
app = Flask(__name__)
PRODUCTION_DOMAIN = 'readthedocs.org'
@app.route('/')
def redirect_front():
version = 'latest'
language = 'en'
single_version = False
SUBDOMAIN = CNAME = False
if PRODUCTION_DOMAIN in request.host:
SUBDOMAIN = True
slug = request.host.split('.')[0]
path = "/home/docs/checkouts/readthedocs.org/user_builds/{slug}/metadata.json".format(slug=slug)
else:
try:
cname = request.host.split(':')[0]
except:
cname = request.host
CNAME = True
path = "/home/docs/checkouts/readthedocs.org/cnametoproject/{cname}/metadata.json".format(cname=cname)
try:
json_obj = json.load(file(path))
version = json_obj['version']
language = json_obj['language']
single_version = json_obj['single_version']
except Exception, e:
print e
if single_version:
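        # Single-version projects skip the /<language>/<version>/ redirect and
        # return the docs path in the X-Send-File header for the web server to serve.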
if SUBDOMAIN:
sendfile = "/user_builds/{slug}/translations/{language}/{version}".format(slug=slug, language=language, version=version)
elif CNAME:
sendfile = "/cnametoproject/{cname}/translation/{language}/{version}/".format(cname=cname, language=language, version=version)
print "Redirecting {host} to {sendfile}".format(host=request.host, sendfile=sendfile)
return make_response('', 200, {'X-Send-File': sendfile})
else:
url = '/{language}/{version}/'.format(language=language, version=version)
print "Redirecting {host} to {url}".format(host=request.host, url=url)
return redirect(url)
if __name__ == '__main__':
app.run()
|
Add a small flask app for redirecting /.
This should never hit the database,
and give us a nice place to make smarter redirects.import json
from flask import Flask, make_response, redirect, request
app = Flask(__name__)
PRODUCTION_DOMAIN = 'readthedocs.org'
@app.route('/')
def redirect_front():
version = 'latest'
language = 'en'
single_version = False
SUBDOMAIN = CNAME = False
if PRODUCTION_DOMAIN in request.host:
SUBDOMAIN = True
slug = request.host.split('.')[0]
path = "/home/docs/checkouts/readthedocs.org/user_builds/{slug}/metadata.json".format(slug=slug)
else:
try:
cname = request.host.split(':')[0]
except:
cname = request.host
CNAME = True
path = "/home/docs/checkouts/readthedocs.org/cnametoproject/{cname}/metadata.json".format(cname=cname)
try:
json_obj = json.load(file(path))
version = json_obj['version']
language = json_obj['language']
single_version = json_obj['single_version']
except Exception, e:
print e
if single_version:
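        # Single-version projects skip the /<language>/<version>/ redirect and
        # return the docs path in the X-Send-File header for the web server to serve.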
if SUBDOMAIN:
sendfile = "/user_builds/{slug}/translations/{language}/{version}".format(slug=slug, language=language, version=version)
elif CNAME:
sendfile = "/cnametoproject/{cname}/translation/{language}/{version}/".format(cname=cname, language=language, version=version)
print "Redirecting {host} to {sendfile}".format(host=request.host, sendfile=sendfile)
return make_response('', 200, {'X-Send-File': sendfile})
else:
url = '/{language}/{version}/'.format(language=language, version=version)
print "Redirecting {host} to {url}".format(host=request.host, url=url)
return redirect(url)
if __name__ == '__main__':
app.run()
|
<commit_before><commit_msg>Add a small flask app for redirecting /.
This should never hit the database,
and give us a nice place to make smarter redirects.<commit_after>import json
from flask import Flask, make_response, redirect, request
app = Flask(__name__)
PRODUCTION_DOMAIN = 'readthedocs.org'
@app.route('/')
def redirect_front():
version = 'latest'
language = 'en'
single_version = False
SUBDOMAIN = CNAME = False
if PRODUCTION_DOMAIN in request.host:
SUBDOMAIN = True
slug = request.host.split('.')[0]
path = "/home/docs/checkouts/readthedocs.org/user_builds/{slug}/metadata.json".format(slug=slug)
else:
try:
cname = request.host.split(':')[0]
except:
cname = request.host
CNAME = True
path = "/home/docs/checkouts/readthedocs.org/cnametoproject/{cname}/metadata.json".format(cname=cname)
try:
json_obj = json.load(file(path))
version = json_obj['version']
language = json_obj['language']
single_version = json_obj['single_version']
except Exception, e:
print e
if single_version:
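        # Single-version projects skip the /<language>/<version>/ redirect and
        # return the docs path in the X-Send-File header for the web server to serve.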
if SUBDOMAIN:
sendfile = "/user_builds/{slug}/translations/{language}/{version}".format(slug=slug, language=language, version=version)
elif CNAME:
sendfile = "/cnametoproject/{cname}/translation/{language}/{version}/".format(cname=cname, language=language, version=version)
print "Redirecting {host} to {sendfile}".format(host=request.host, sendfile=sendfile)
return make_response('', 200, {'X-Send-File': sendfile})
else:
url = '/{language}/{version}/'.format(language=language, version=version)
print "Redirecting {host} to {url}".format(host=request.host, url=url)
return redirect(url)
if __name__ == '__main__':
app.run()
|
|
e7738a6c7c13a68f17b7b29d91363d93fdaaae5f
|
hackernews_scrapy/spiders/hackernews_spider.py
|
hackernews_scrapy/spiders/hackernews_spider.py
|
from scrapy.spiders import Spider
from scrapy.http import Request
from hackernews_scrapy.items import HackernewsScrapyItem
from scrapy.selector import Selector
class HackernewsSpider(Spider):
name = 'pythonhackernews'
allowed_hosts = ['news.ycombinator.com']
start_urls = ['https://news.ycombinator.com']
def parse(self, response):
"""Parse the HTML to get the information we need"""
html_xpath_selector = Selector(response)
titles = html_xpath_selector.xpath('//td[@class="title"]/a/text()').extract()
for title in titles:
item = HackernewsScrapyItem()
item['title'] = title
yield item
|
Add spider for Hacker News
|
Add spider for Hacker News
|
Python
|
mit
|
mdsrosa/hackernews_scrapy
|
Add spider for Hacker News
|
from scrapy.spiders import Spider
from scrapy.http import Request
from hackernews_scrapy.items import HackernewsScrapyItem
from scrapy.selector import Selector
class HackernewsSpider(Spider):
name = 'pythonhackernews'
allowed_hosts = ['news.ycombinator.com']
start_urls = ['https://news.ycombinator.com']
def parse(self, response):
"""Parse the HTML to get the information we need"""
html_xpath_selector = Selector(response)
titles = html_xpath_selector.xpath('//td[@class="title"]/a/text()').extract()
for title in titles:
item = HackernewsScrapyItem()
item['title'] = title
yield item
|
<commit_before><commit_msg>Add spider for Hacker News<commit_after>
|
from scrapy.spiders import Spider
from scrapy.http import Request
from hackernews_scrapy.items import HackernewsScrapyItem
from scrapy.selector import Selector
class HackernewsSpider(Spider):
name = 'pythonhackernews'
allowed_hosts = ['news.ycombinator.com']
start_urls = ['https://news.ycombinator.com']
def parse(self, response):
"""Parse the HTML to get the information we need"""
html_xpath_selector = Selector(response)
titles = html_xpath_selector.xpath('//td[@class="title"]/a/text()').extract()
for title in titles:
item = HackernewsScrapyItem()
item['title'] = title
yield item
|
Add spider for Hacker Newsfrom scrapy.spiders import Spider
from scrapy.http import Request
from hackernews_scrapy.items import HackernewsScrapyItem
from scrapy.selector import Selector
class HackernewsSpider(Spider):
name = 'pythonhackernews'
allowed_hosts = ['news.ycombinator.com']
start_urls = ['https://news.ycombinator.com']
def parse(self, response):
"""Parse the HTML to get the information we need"""
html_xpath_selector = Selector(response)
titles = html_xpath_selector.xpath('//td[@class="title"]/a/text()').extract()
for title in titles:
item = HackernewsScrapyItem()
item['title'] = title
yield item
|
<commit_before><commit_msg>Add spider for Hacker News<commit_after>from scrapy.spiders import Spider
from scrapy.http import Request
from hackernews_scrapy.items import HackernewsScrapyItem
from scrapy.selector import Selector
class HackernewsSpider(Spider):
name = 'pythonhackernews'
allowed_hosts = ['news.ycombinator.com']
start_urls = ['https://news.ycombinator.com']
def parse(self, response):
"""Parse the HTML to get the information we need"""
html_xpath_selector = Selector(response)
titles = html_xpath_selector.xpath('//td[@class="title"]/a/text()').extract()
for title in titles:
item = HackernewsScrapyItem()
item['title'] = title
yield item
|
|
82cc7fa29747672abe6c27d3540c272b576c1d4b
|
listfriendships.py
|
listfriendships.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This script lists all the followers of a Twitter user.
Examples:
You must specify the user's screen_name you want to show::
$ python listfriendships.py list-friends screen_name
$ python listfriendships.py list-followers screen_name
"""
from secret import twitter_instance
from argparse import ArgumentParser
__version__ = '1.0.0'
COMMAND_LIST_FRIENDS = 'list-friends'
COMMAND_LIST_FOLLOWERS = 'list-followers'
def configure():
"""Parse the command line parameters.
Returns:
An instance of argparse.ArgumentParser that stores the command line
parameters.
"""
parser = ArgumentParser(description='Twitter Followers Viewer')
parser.add_argument('--version', action='version', version=__version__)
# Positional arguments
parser.add_argument(
'command',
choices=[COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS,],
help='Either "{0}" or "{1}".'.format(COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS))
parser.add_argument(
'screen_name',
help='The screen name of the target user.')
return parser
def format_user(user):
"""Return a string that shows user information.
Args:
user: An instance of the Twitter API users response object.
Returns:
A colon-separated value string.
"""
return '{screen_name}:{name}:{description}:{url}'.format(**user).replace('\r', '').replace('\n', '')
def main(args):
"""The main function.
Args:
        args: The argparse.Namespace parsed from the command line by the
            parser built in the configure function.
Returns:
None.
"""
tw = twitter_instance()
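    # Twitter cursoring starts at -1; a next_cursor of 0 marks the last page.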
next_cursor = -1
cmd = None
if args.command == COMMAND_LIST_FRIENDS:
cmd = tw.friends.list
elif args.command == COMMAND_LIST_FOLLOWERS:
cmd = tw.followers.list
while next_cursor != 0:
friends = cmd(
screen_name=args.screen_name,
cursor=next_cursor,
count=200,
skip_status=True,
include_user_entities=False,)
for user in friends['users']:
print(format_user(user))
next_cursor = friends['next_cursor']
if __name__ == '__main__':
parser = configure()
main(parser.parse_args())
|
Add a Python script which lists the friendships of a Twitter user.
|
Add a Python script which lists the friendships of a Twitter user.
|
Python
|
mit
|
showa-yojyo/bin,showa-yojyo/bin
|
Add a Python script which lists the friendships of a Twitter user.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This script lists all the followers of a Twitter user.
Examples:
You must specify the user's screen_name you want to show::
$ python listfriendships.py list-friends screen_name
$ python listfriendships.py list-followers screen_name
"""
from secret import twitter_instance
from argparse import ArgumentParser
__version__ = '1.0.0'
COMMAND_LIST_FRIENDS = 'list-friends'
COMMAND_LIST_FOLLOWERS = 'list-followers'
def configure():
"""Parse the command line parameters.
Returns:
An instance of argparse.ArgumentParser that stores the command line
parameters.
"""
parser = ArgumentParser(description='Twitter Followers Viewer')
parser.add_argument('--version', action='version', version=__version__)
# Positional arguments
parser.add_argument(
'command',
choices=[COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS,],
help='Either "{0}" or "{1}".'.format(COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS))
parser.add_argument(
'screen_name',
help='The screen name of the target user.')
return parser
def format_user(user):
"""Return a string that shows user information.
Args:
user: An instance of the Twitter API users response object.
Returns:
A colon-separated value string.
"""
return '{screen_name}:{name}:{description}:{url}'.format(**user).replace('\r', '').replace('\n', '')
def main(args):
"""The main function.
Args:
        args: The argparse.Namespace parsed from the command line by the
            parser built in the configure function.
Returns:
None.
"""
tw = twitter_instance()
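    # Twitter cursoring starts at -1; a next_cursor of 0 marks the last page.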
next_cursor = -1
cmd = None
if args.command == COMMAND_LIST_FRIENDS:
cmd = tw.friends.list
elif args.command == COMMAND_LIST_FOLLOWERS:
cmd = tw.followers.list
while next_cursor != 0:
friends = cmd(
screen_name=args.screen_name,
cursor=next_cursor,
count=200,
skip_status=True,
include_user_entities=False,)
for user in friends['users']:
print(format_user(user))
next_cursor = friends['next_cursor']
if __name__ == '__main__':
parser = configure()
main(parser.parse_args())
|
<commit_before><commit_msg>Add a Python script which lists the friendships of a Twitter user.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This script lists all the followers of a Twitter user.
Examples:
You must specify the user's screen_name you want to show::
$ python listfriendships.py list-friends screen_name
$ python listfriendships.py list-followers screen_name
"""
from secret import twitter_instance
from argparse import ArgumentParser
__version__ = '1.0.0'
COMMAND_LIST_FRIENDS = 'list-friends'
COMMAND_LIST_FOLLOWERS = 'list-followers'
def configure():
"""Parse the command line parameters.
Returns:
An instance of argparse.ArgumentParser that stores the command line
parameters.
"""
parser = ArgumentParser(description='Twitter Followers Viewer')
parser.add_argument('--version', action='version', version=__version__)
# Positional arguments
parser.add_argument(
'command',
choices=[COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS,],
help='Either "{0}" or "{1}".'.format(COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS))
parser.add_argument(
'screen_name',
help='The screen name of the target user.')
return parser
def format_user(user):
"""Return a string that shows user information.
Args:
user: An instance of the Twitter API users response object.
Returns:
A colon-separated value string.
"""
return '{screen_name}:{name}:{description}:{url}'.format(**user).replace('\r', '').replace('\n', '')
def main(args):
"""The main function.
Args:
args: An instance of argparse.ArgumentParser parsed in the configure
function.
Returns:
None.
"""
tw = twitter_instance()
next_cursor = -1
cmd = None
if args.command == COMMAND_LIST_FRIENDS:
cmd = tw.friends.list
elif args.command == COMMAND_LIST_FOLLOWERS:
cmd = tw.followers.list
while next_cursor != 0:
friends = cmd(
screen_name=args.screen_name,
cursor=next_cursor,
count=200,
skip_status=True,
include_user_entities=False,)
for user in friends['users']:
print(format_user(user))
next_cursor = friends['next_cursor']
if __name__ == '__main__':
parser = configure()
main(parser.parse_args())
|
Add a Python script which lists the friendships of a Twitter user.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This script lists all the followers of a Twitter user.
Examples:
You must specify the user's screen_name you want to show::
$ python listfriendships.py list-friends screen_name
$ python listfriendships.py list-followers screen_name
"""
from secret import twitter_instance
from argparse import ArgumentParser
__version__ = '1.0.0'
COMMAND_LIST_FRIENDS = 'list-friends'
COMMAND_LIST_FOLLOWERS = 'list-followers'
def configure():
"""Parse the command line parameters.
Returns:
An instance of argparse.ArgumentParser that stores the command line
parameters.
"""
parser = ArgumentParser(description='Twitter Followers Viewer')
parser.add_argument('--version', action='version', version=__version__)
# Positional arguments
parser.add_argument(
'command',
choices=[COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS,],
help='Either "{0}" or "{1}".'.format(COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS))
parser.add_argument(
'screen_name',
help='The screen name of the target user.')
return parser
def format_user(user):
"""Return a string that shows user information.
Args:
user: An instance of the Twitter API users response object.
Returns:
A colon-separated value string.
"""
return '{screen_name}:{name}:{description}:{url}'.format(**user).replace('\r', '').replace('\n', '')
def main(args):
"""The main function.
Args:
args: An instance of argparse.ArgumentParser parsed in the configure
function.
Returns:
None.
"""
tw = twitter_instance()
next_cursor = -1
cmd = None
if args.command == COMMAND_LIST_FRIENDS:
cmd = tw.friends.list
elif args.command == COMMAND_LIST_FOLLOWERS:
cmd = tw.followers.list
while next_cursor != 0:
friends = cmd(
screen_name=args.screen_name,
cursor=next_cursor,
count=200,
skip_status=True,
include_user_entities=False,)
for user in friends['users']:
print(format_user(user))
next_cursor = friends['next_cursor']
if __name__ == '__main__':
parser = configure()
main(parser.parse_args())
|
<commit_before><commit_msg>Add a Python script which lists the friendships of a Twitter user.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This script lists all the followers of a Twitter user.
Examples:
You must specify the user's screen_name you want to show::
$ python listfriendships.py list-friends screen_name
$ python listfriendships.py list-followers screen_name
"""
from secret import twitter_instance
from argparse import ArgumentParser
__version__ = '1.0.0'
COMMAND_LIST_FRIENDS = 'list-friends'
COMMAND_LIST_FOLLOWERS = 'list-followers'
def configure():
"""Parse the command line parameters.
Returns:
An instance of argparse.ArgumentParser that stores the command line
parameters.
"""
parser = ArgumentParser(description='Twitter Followers Viewer')
parser.add_argument('--version', action='version', version=__version__)
# Positional arguments
parser.add_argument(
'command',
choices=[COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS,],
help='Either "{0}" or "{1}".'.format(COMMAND_LIST_FRIENDS, COMMAND_LIST_FOLLOWERS))
parser.add_argument(
'screen_name',
help='The screen name of the target user.')
return parser
def format_user(user):
"""Return a string that shows user information.
Args:
user: An instance of the Twitter API users response object.
Returns:
A colon-separated value string.
"""
return '{screen_name}:{name}:{description}:{url}'.format(**user).replace('\r', '').replace('\n', '')
def main(args):
"""The main function.
Args:
args: An instance of argparse.ArgumentParser parsed in the configure
function.
Returns:
None.
"""
tw = twitter_instance()
next_cursor = -1
cmd = None
if args.command == COMMAND_LIST_FRIENDS:
cmd = tw.friends.list
elif args.command == COMMAND_LIST_FOLLOWERS:
cmd = tw.followers.list
while next_cursor != 0:
friends = cmd(
screen_name=args.screen_name,
cursor=next_cursor,
count=200,
skip_status=True,
include_user_entities=False,)
for user in friends['users']:
print(format_user(user))
next_cursor = friends['next_cursor']
if __name__ == '__main__':
parser = configure()
main(parser.parse_args())
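The script relies on a local secret module, not included in the commit, that returns an authenticated API client. A minimal sketch of what such a helper might look like, assuming the sixohsix twitter package and placeholder credentials (every value below is illustrative, not taken from the original repository):

# secret.py (hypothetical): build an authenticated Twitter REST API client.
from twitter import Twitter, OAuth

# Placeholder credentials; real values come from your own Twitter application.
CONSUMER_KEY = 'your-consumer-key'
CONSUMER_SECRET = 'your-consumer-secret'
ACCESS_TOKEN = 'your-access-token'
ACCESS_TOKEN_SECRET = 'your-access-token-secret'

def twitter_instance():
    """Return a Twitter client authenticated with OAuth 1.0a."""
    return Twitter(auth=OAuth(
        ACCESS_TOKEN, ACCESS_TOKEN_SECRET,
        CONSUMER_KEY, CONSUMER_SECRET))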
|
|
cc321636b1a755381067d18444a951c852771f21
|
zephyr/management/commands/bankrupt_users.py
|
zephyr/management/commands/bankrupt_users.py
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zephyr.lib.actions import update_message_flags
from zephyr.models import UserProfile, Message, get_user_profile_by_email
class Command(BaseCommand):
help = """Bankrupt one or many users.
Usage: python manage.py bankrupt_users <list of email addresses>"""
def handle(self, *args, **options):
if len(args) < 1:
print "Please provide at least one e-mail address."
exit(1)
for email in args:
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
print "e-mail %s doesn't exist in the system, skipping" % (email,)
continue
update_message_flags(user_profile, "add", "read", None, True)
messages = Message.objects.filter(
usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
old_pointer = user_profile.pointer
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save()
print "%s: %d => %d" % (email, old_pointer, new_pointer)
else:
print "%s has no messages, can't bankrupt!" % (email,)
|
Add a management command to bankrupt users.
|
Add a management command to bankrupt users.
(imported from commit 58fbd08fc31a69c9ee7fb73b9302d44eb87db1fa)
|
Python
|
apache-2.0
|
LAndreas/zulip,sharmaeklavya2/zulip,mahim97/zulip,jrowan/zulip,willingc/zulip,willingc/zulip,amanharitsh123/zulip,moria/zulip,johnny9/zulip,xuxiao/zulip,christi3k/zulip,stamhe/zulip,alliejones/zulip,bitemyapp/zulip,Diptanshu8/zulip,sup95/zulip,m1ssou/zulip,bowlofstew/zulip,jackrzhang/zulip,dhcrzf/zulip,thomasboyt/zulip,alliejones/zulip,dxq-git/zulip,EasonYi/zulip,SmartPeople/zulip,pradiptad/zulip,natanovia/zulip,MariaFaBella85/zulip,wavelets/zulip,armooo/zulip,Cheppers/zulip,zhaoweigg/zulip,grave-w-grave/zulip,praveenaki/zulip,johnny9/zulip,Vallher/zulip,ahmadassaf/zulip,shrikrishnaholla/zulip,wangdeshui/zulip,LeeRisk/zulip,rht/zulip,zorojean/zulip,tiansiyuan/zulip,niftynei/zulip,amanharitsh123/zulip,aps-sids/zulip,aps-sids/zulip,vakila/zulip,levixie/zulip,aliceriot/zulip,karamcnair/zulip,sup95/zulip,amanharitsh123/zulip,hafeez3000/zulip,bitemyapp/zulip,developerfm/zulip,brainwane/zulip,Batterfii/zulip,blaze225/zulip,swinghu/zulip,xuanhan863/zulip,dwrpayne/zulip,reyha/zulip,eastlhu/zulip,itnihao/zulip,eastlhu/zulip,hackerkid/zulip,paxapy/zulip,JPJPJPOPOP/zulip,levixie/zulip,hengqujushi/zulip,jerryge/zulip,peiwei/zulip,rishig/zulip,yocome/zulip,ahmadassaf/zulip,jeffcao/zulip,willingc/zulip,firstblade/zulip,samatdav/zulip,udxxabp/zulip,he15his/zulip,johnnygaddarr/zulip,stamhe/zulip,JanzTam/zulip,seapasulli/zulip,esander91/zulip,easyfmxu/zulip,huangkebo/zulip,hafeez3000/zulip,PhilSk/zulip,zofuthan/zulip,niftynei/zulip,udxxabp/zulip,shaunstanislaus/zulip,jimmy54/zulip,punchagan/zulip,aakash-cr7/zulip,yuvipanda/zulip,mdavid/zulip,showell/zulip,sup95/zulip,codeKonami/zulip,so0k/zulip,JPJPJPOPOP/zulip,technicalpickles/zulip,pradiptad/zulip,guiquanz/zulip,kokoar/zulip,shubhamdhama/zulip,atomic-labs/zulip,technicalpickles/zulip,zwily/zulip,krtkmj/zulip,brockwhittaker/zulip,jimmy54/zulip,showell/zulip,he15his/zulip,praveenaki/zulip,atomic-labs/zulip,isht3/zulip,huangkebo/zulip,PaulPetring/zulip,jessedhillon/zulip,atomic-labs/zulip,easyfmxu/zulip,blaze225/zulip,KingxBanana/zulip,tdr130/zulip,wangdeshui/zulip,lfranchi/zulip,RobotCaleb/zulip,vakila/zulip,bssrdf/zulip,aps-sids/zulip,zwily/zulip,Cheppers/zulip,itnihao/zulip,dawran6/zulip,souravbadami/zulip,ApsOps/zulip,jainayush975/zulip,bitemyapp/zulip,guiquanz/zulip,bssrdf/zulip,shrikrishnaholla/zulip,Suninus/zulip,bluesea/zulip,peguin40/zulip,cosmicAsymmetry/zulip,AZtheAsian/zulip,technicalpickles/zulip,SmartPeople/zulip,littledogboy/zulip,noroot/zulip,punchagan/zulip,ryanbackman/zulip,amyliu345/zulip,susansls/zulip,dattatreya303/zulip,suxinde2009/zulip,brockwhittaker/zulip,susansls/zulip,itnihao/zulip,gkotian/zulip,wavelets/zulip,eeshangarg/zulip,mohsenSy/zulip,Drooids/zulip,kou/zulip,vikas-parashar/zulip,christi3k/zulip,KJin99/zulip,shubhamdhama/zulip,karamcnair/zulip,joyhchen/zulip,armooo/zulip,alliejones/zulip,m1ssou/zulip,themass/zulip,jrowan/zulip,andersk/zulip,eeshangarg/zulip,andersk/zulip,AZtheAsian/zulip,Juanvulcano/zulip,bssrdf/zulip,timabbott/zulip,zorojean/zulip,paxapy/zulip,aps-sids/zulip,themass/zulip,saitodisse/zulip,tiansiyuan/zulip,luyifan/zulip,Cheppers/zulip,SmartPeople/zulip,zachallaun/zulip,deer-hope/zulip,easyfmxu/zulip,reyha/zulip,gigawhitlocks/zulip,fw1121/zulip,arpitpanwar/zulip,saitodisse/zulip,showell/zulip,ipernet/zulip,he15his/zulip,jrowan/zulip,brainwane/zulip,LAndreas/zulip,krtkmj/zulip,dattatreya303/zulip,calvinleenyc/zulip,huangkebo/zulip,bastianh/zulip,stamhe/zulip,avastu/zulip,arpitpanwar/zulip,synicalsyntax/zulip,dxq-git/zulip,Gabriel0402/zulip,bssrdf/zulip,akuseru/zulip,schatt/zulip,littledogboy/zulip,nicholasbs/zu
lip,avastu/zulip,Drooids/zulip,tiansiyuan/zulip,mohsenSy/zulip,Vallher/zulip,dhcrzf/zulip,blaze225/zulip,jphilipsen05/zulip,arpitpanwar/zulip,joyhchen/zulip,mdavid/zulip,MayB/zulip,arpitpanwar/zulip,JanzTam/zulip,shubhamdhama/zulip,avastu/zulip,amanharitsh123/zulip,atomic-labs/zulip,dwrpayne/zulip,ufosky-server/zulip,jessedhillon/zulip,qq1012803704/zulip,voidException/zulip,jackrzhang/zulip,hayderimran7/zulip,kaiyuanheshang/zulip,yocome/zulip,MariaFaBella85/zulip,grave-w-grave/zulip,shubhamdhama/zulip,jainayush975/zulip,paxapy/zulip,arpith/zulip,andersk/zulip,wavelets/zulip,wweiradio/zulip,jessedhillon/zulip,Jianchun1/zulip,susansls/zulip,jerryge/zulip,levixie/zulip,ipernet/zulip,stamhe/zulip,tommyip/zulip,Jianchun1/zulip,mansilladev/zulip,Batterfii/zulip,mahim97/zulip,johnny9/zulip,amyliu345/zulip,karamcnair/zulip,babbage/zulip,seapasulli/zulip,PhilSk/zulip,swinghu/zulip,deer-hope/zulip,karamcnair/zulip,armooo/zulip,reyha/zulip,peiwei/zulip,Juanvulcano/zulip,SmartPeople/zulip,he15his/zulip,peguin40/zulip,wavelets/zulip,Jianchun1/zulip,rishig/zulip,fw1121/zulip,luyifan/zulip,ufosky-server/zulip,jphilipsen05/zulip,jimmy54/zulip,PaulPetring/zulip,guiquanz/zulip,dattatreya303/zulip,thomasboyt/zulip,mdavid/zulip,jphilipsen05/zulip,shrikrishnaholla/zulip,ryanbackman/zulip,punchagan/zulip,EasonYi/zulip,Jianchun1/zulip,bssrdf/zulip,Frouk/zulip,aps-sids/zulip,thomasboyt/zulip,tdr130/zulip,dawran6/zulip,KJin99/zulip,amallia/zulip,dwrpayne/zulip,adnanh/zulip,dnmfarrell/zulip,ikasumiwt/zulip,ufosky-server/zulip,zulip/zulip,peiwei/zulip,fw1121/zulip,AZtheAsian/zulip,stamhe/zulip,ashwinirudrappa/zulip,ericzhou2008/zulip,JPJPJPOPOP/zulip,bastianh/zulip,jeffcao/zulip,cosmicAsymmetry/zulip,codeKonami/zulip,zhaoweigg/zulip,rht/zulip,pradiptad/zulip,natanovia/zulip,showell/zulip,itnihao/zulip,bowlofstew/zulip,easyfmxu/zulip,schatt/zulip,dhcrzf/zulip,PaulPetring/zulip,saitodisse/zulip,avastu/zulip,Frouk/zulip,huangkebo/zulip,m1ssou/zulip,pradiptad/zulip,zwily/zulip,TigorC/zulip,natanovia/zulip,susansls/zulip,LeeRisk/zulip,gkotian/zulip,vabs22/zulip,sonali0901/zulip,calvinleenyc/zulip,luyifan/zulip,voidException/zulip,jonesgithub/zulip,yuvipanda/zulip,vikas-parashar/zulip,peguin40/zulip,glovebx/zulip,zhaoweigg/zulip,calvinleenyc/zulip,shrikrishnaholla/zulip,zofuthan/zulip,jrowan/zulip,eastlhu/zulip,technicalpickles/zulip,vaidap/zulip,brainwane/zulip,jessedhillon/zulip,zhaoweigg/zulip,ipernet/zulip,jerryge/zulip,easyfmxu/zulip,udxxabp/zulip,natanovia/zulip,joshisa/zulip,tommyip/zulip,tiansiyuan/zulip,christi3k/zulip,ryansnowboarder/zulip,littledogboy/zulip,Diptanshu8/zulip,zofuthan/zulip,shaunstanislaus/zulip,synicalsyntax/zulip,tommyip/zulip,so0k/zulip,jrowan/zulip,moria/zulip,zhaoweigg/zulip,jainayush975/zulip,hj3938/zulip,ashwinirudrappa/zulip,babbage/zulip,JanzTam/zulip,noroot/zulip,bastianh/zulip,ahmadassaf/zulip,alliejones/zulip,schatt/zulip,kokoar/zulip,codeKonami/zulip,wweiradio/zulip,jphilipsen05/zulip,RobotCaleb/zulip,Frouk/zulip,ericzhou2008/zulip,zacps/zulip,TigorC/zulip,saitodisse/zulip,moria/zulip,j831/zulip,bssrdf/zulip,Cheppers/zulip,mohsenSy/zulip,shubhamdhama/zulip,adnanh/zulip,niftynei/zulip,bluesea/zulip,littledogboy/zulip,aliceriot/zulip,Jianchun1/zulip,ashwinirudrappa/zulip,rishig/zulip,suxinde2009/zulip,niftynei/zulip,johnnygaddarr/zulip,bastianh/zulip,wavelets/zulip,RobotCaleb/zulip,guiquanz/zulip,hackerkid/zulip,LeeRisk/zulip,rishig/zulip,schatt/zulip,amallia/zulip,calvinleenyc/zulip,brainwane/zulip,hayderimran7/zulip,dawran6/zulip,hayderimran7/zulip,jeffcao/zulip,deer-hope/zulip,jerryg
e/zulip,firstblade/zulip,babbage/zulip,aliceriot/zulip,timabbott/zulip,huangkebo/zulip,MayB/zulip,brockwhittaker/zulip,dwrpayne/zulip,esander91/zulip,shrikrishnaholla/zulip,souravbadami/zulip,alliejones/zulip,wdaher/zulip,aakash-cr7/zulip,joshisa/zulip,suxinde2009/zulip,xuanhan863/zulip,guiquanz/zulip,avastu/zulip,avastu/zulip,tiansiyuan/zulip,j831/zulip,Drooids/zulip,KingxBanana/zulip,lfranchi/zulip,dotcool/zulip,samatdav/zulip,Qgap/zulip,dhcrzf/zulip,seapasulli/zulip,amallia/zulip,jonesgithub/zulip,natanovia/zulip,ApsOps/zulip,amanharitsh123/zulip,littledogboy/zulip,timabbott/zulip,tdr130/zulip,aakash-cr7/zulip,mdavid/zulip,ipernet/zulip,Frouk/zulip,udxxabp/zulip,yuvipanda/zulip,sonali0901/zulip,seapasulli/zulip,jonesgithub/zulip,proliming/zulip,vaidap/zulip,zorojean/zulip,akuseru/zulip,thomasboyt/zulip,peguin40/zulip,dhcrzf/zulip,proliming/zulip,glovebx/zulip,blaze225/zulip,Vallher/zulip,punchagan/zulip,dxq-git/zulip,wweiradio/zulip,sharmaeklavya2/zulip,akuseru/zulip,hayderimran7/zulip,gkotian/zulip,arpitpanwar/zulip,vakila/zulip,LAndreas/zulip,xuanhan863/zulip,j831/zulip,wangdeshui/zulip,m1ssou/zulip,LeeRisk/zulip,synicalsyntax/zulip,dotcool/zulip,dotcool/zulip,ashwinirudrappa/zulip,hj3938/zulip,johnny9/zulip,Vallher/zulip,hafeez3000/zulip,PhilSk/zulip,amyliu345/zulip,dwrpayne/zulip,JanzTam/zulip,vabs22/zulip,noroot/zulip,wweiradio/zulip,thomasboyt/zulip,PhilSk/zulip,rishig/zulip,Gabriel0402/zulip,peiwei/zulip,Galexrt/zulip,m1ssou/zulip,Vallher/zulip,eastlhu/zulip,TigorC/zulip,SmartPeople/zulip,developerfm/zulip,krtkmj/zulip,schatt/zulip,bluesea/zulip,shaunstanislaus/zulip,LAndreas/zulip,dattatreya303/zulip,Suninus/zulip,codeKonami/zulip,wdaher/zulip,reyha/zulip,gigawhitlocks/zulip,ericzhou2008/zulip,amanharitsh123/zulip,levixie/zulip,zulip/zulip,hengqujushi/zulip,Qgap/zulip,aliceriot/zulip,jainayush975/zulip,gigawhitlocks/zulip,dawran6/zulip,gkotian/zulip,calvinleenyc/zulip,hengqujushi/zulip,ryanbackman/zulip,seapasulli/zulip,technicalpickles/zulip,isht3/zulip,johnnygaddarr/zulip,vakila/zulip,zacps/zulip,technicalpickles/zulip,souravbadami/zulip,hackerkid/zulip,voidException/zulip,umkay/zulip,jeffcao/zulip,udxxabp/zulip,synicalsyntax/zulip,RobotCaleb/zulip,saitodisse/zulip,so0k/zulip,lfranchi/zulip,johnnygaddarr/zulip,hustlzp/zulip,TigorC/zulip,proliming/zulip,dwrpayne/zulip,swinghu/zulip,bowlofstew/zulip,Drooids/zulip,dotcool/zulip,ufosky-server/zulip,mansilladev/zulip,xuxiao/zulip,kokoar/zulip,kokoar/zulip,voidException/zulip,kaiyuanheshang/zulip,nicholasbs/zulip,wweiradio/zulip,esander91/zulip,glovebx/zulip,EasonYi/zulip,ahmadassaf/zulip,isht3/zulip,akuseru/zulip,souravbadami/zulip,praveenaki/zulip,adnanh/zulip,itnihao/zulip,eeshangarg/zulip,mahim97/zulip,cosmicAsymmetry/zulip,saitodisse/zulip,deer-hope/zulip,brainwane/zulip,Batterfii/zulip,PhilSk/zulip,jimmy54/zulip,kokoar/zulip,Suninus/zulip,showell/zulip,wdaher/zulip,suxinde2009/zulip,easyfmxu/zulip,proliming/zulip,firstblade/zulip,jeffcao/zulip,shaunstanislaus/zulip,bowlofstew/zulip,dnmfarrell/zulip,voidException/zulip,niftynei/zulip,jackrzhang/zulip,atomic-labs/zulip,wangdeshui/zulip,grave-w-grave/zulip,qq1012803704/zulip,ahmadassaf/zulip,ericzhou2008/zulip,fw1121/zulip,mansilladev/zulip,Cheppers/zulip,joshisa/zulip,eastlhu/zulip,RobotCaleb/zulip,j831/zulip,hafeez3000/zulip,thomasboyt/zulip,bowlofstew/zulip,Juanvulcano/zulip,Drooids/zulip,ryansnowboarder/zulip,qq1012803704/zulip,rht/zulip,arpith/zulip,arpith/zulip,vaidap/zulip,synicalsyntax/zulip,willingc/zulip,Juanvulcano/zulip,zofuthan/zulip,MariaFaBella85/zulip,swinghu/zulip
,yuvipanda/zulip,Frouk/zulip,zwily/zulip,zacps/zulip,jonesgithub/zulip,ryansnowboarder/zulip,dnmfarrell/zulip,kaiyuanheshang/zulip,aliceriot/zulip,hafeez3000/zulip,babbage/zulip,natanovia/zulip,vaidap/zulip,Gabriel0402/zulip,dotcool/zulip,hustlzp/zulip,zachallaun/zulip,amyliu345/zulip,bluesea/zulip,karamcnair/zulip,atomic-labs/zulip,EasonYi/zulip,MayB/zulip,kou/zulip,huangkebo/zulip,armooo/zulip,mahim97/zulip,showell/zulip,amallia/zulip,zorojean/zulip,DazWorrall/zulip,qq1012803704/zulip,karamcnair/zulip,seapasulli/zulip,bluesea/zulip,Suninus/zulip,JanzTam/zulip,sonali0901/zulip,armooo/zulip,jphilipsen05/zulip,swinghu/zulip,dawran6/zulip,adnanh/zulip,kokoar/zulip,developerfm/zulip,vikas-parashar/zulip,esander91/zulip,jerryge/zulip,dxq-git/zulip,KJin99/zulip,zofuthan/zulip,dxq-git/zulip,Batterfii/zulip,arpith/zulip,armooo/zulip,atomic-labs/zulip,KJin99/zulip,hayderimran7/zulip,aps-sids/zulip,LeeRisk/zulip,mohsenSy/zulip,hj3938/zulip,praveenaki/zulip,dnmfarrell/zulip,tommyip/zulip,DazWorrall/zulip,mdavid/zulip,gkotian/zulip,babbage/zulip,Qgap/zulip,bitemyapp/zulip,jackrzhang/zulip,souravbadami/zulip,krtkmj/zulip,LeeRisk/zulip,kokoar/zulip,krtkmj/zulip,itnihao/zulip,itnihao/zulip,thomasboyt/zulip,glovebx/zulip,eeshangarg/zulip,xuxiao/zulip,hackerkid/zulip,kaiyuanheshang/zulip,jerryge/zulip,zulip/zulip,jonesgithub/zulip,dattatreya303/zulip,Galexrt/zulip,zulip/zulip,avastu/zulip,ikasumiwt/zulip,bitemyapp/zulip,ryanbackman/zulip,sharmaeklavya2/zulip,vaidap/zulip,ericzhou2008/zulip,suxinde2009/zulip,ahmadassaf/zulip,kou/zulip,zachallaun/zulip,nicholasbs/zulip,KingxBanana/zulip,luyifan/zulip,zulip/zulip,ryansnowboarder/zulip,eeshangarg/zulip,jackrzhang/zulip,nicholasbs/zulip,tdr130/zulip,vakila/zulip,MayB/zulip,bowlofstew/zulip,samatdav/zulip,yuvipanda/zulip,deer-hope/zulip,andersk/zulip,babbage/zulip,shaunstanislaus/zulip,nicholasbs/zulip,MariaFaBella85/zulip,jainayush975/zulip,tbutter/zulip,umkay/zulip,samatdav/zulip,kaiyuanheshang/zulip,gigawhitlocks/zulip,firstblade/zulip,guiquanz/zulip,mansilladev/zulip,jainayush975/zulip,KJin99/zulip,mdavid/zulip,Suninus/zulip,bastianh/zulip,jackrzhang/zulip,sup95/zulip,hackerkid/zulip,synicalsyntax/zulip,gigawhitlocks/zulip,RobotCaleb/zulip,umkay/zulip,zulip/zulip,seapasulli/zulip,wdaher/zulip,JanzTam/zulip,paxapy/zulip,joyhchen/zulip,saitodisse/zulip,qq1012803704/zulip,samatdav/zulip,so0k/zulip,vikas-parashar/zulip,sonali0901/zulip,zacps/zulip,ikasumiwt/zulip,xuxiao/zulip,esander91/zulip,DazWorrall/zulip,verma-varsha/zulip,proliming/zulip,johnnygaddarr/zulip,Suninus/zulip,sonali0901/zulip,schatt/zulip,MariaFaBella85/zulip,xuanhan863/zulip,Diptanshu8/zulip,umkay/zulip,rht/zulip,christi3k/zulip,udxxabp/zulip,rishig/zulip,voidException/zulip,timabbott/zulip,Galexrt/zulip,paxapy/zulip,aps-sids/zulip,vikas-parashar/zulip,arpitpanwar/zulip,akuseru/zulip,KJin99/zulip,dwrpayne/zulip,verma-varsha/zulip,ufosky-server/zulip,kou/zulip,hengqujushi/zulip,hustlzp/zulip,wavelets/zulip,hustlzp/zulip,zorojean/zulip,littledogboy/zulip,schatt/zulip,joshisa/zulip,so0k/zulip,developerfm/zulip,brockwhittaker/zulip,moria/zulip,ApsOps/zulip,zhaoweigg/zulip,dnmfarrell/zulip,fw1121/zulip,MariaFaBella85/zulip,zhaoweigg/zulip,PaulPetring/zulip,ryansnowboarder/zulip,shubhamdhama/zulip,jessedhillon/zulip,mohsenSy/zulip,dhcrzf/zulip,DazWorrall/zulip,yuvipanda/zulip,adnanh/zulip,wavelets/zulip,hj3938/zulip,tbutter/zulip,amallia/zulip,zachallaun/zulip,tbutter/zulip,peguin40/zulip,umkay/zulip,verma-varsha/zulip,glovebx/zulip,grave-w-grave/zulip,pradiptad/zulip,aakash-cr7/zulip,aakash-cr7/zuli
p,ApsOps/zulip,bluesea/zulip,Gabriel0402/zulip,hengqujushi/zulip,xuxiao/zulip,hafeez3000/zulip,codeKonami/zulip,Vallher/zulip,EasonYi/zulip,dnmfarrell/zulip,PaulPetring/zulip,dotcool/zulip,fw1121/zulip,deer-hope/zulip,akuseru/zulip,codeKonami/zulip,levixie/zulip,he15his/zulip,sup95/zulip,verma-varsha/zulip,zofuthan/zulip,johnny9/zulip,brainwane/zulip,PaulPetring/zulip,themass/zulip,Jianchun1/zulip,deer-hope/zulip,ufosky-server/zulip,ahmadassaf/zulip,voidException/zulip,verma-varsha/zulip,themass/zulip,bowlofstew/zulip,Qgap/zulip,Frouk/zulip,ApsOps/zulip,punchagan/zulip,tdr130/zulip,bitemyapp/zulip,joyhchen/zulip,joyhchen/zulip,sharmaeklavya2/zulip,tiansiyuan/zulip,m1ssou/zulip,alliejones/zulip,cosmicAsymmetry/zulip,mansilladev/zulip,umkay/zulip,eastlhu/zulip,ashwinirudrappa/zulip,ikasumiwt/zulip,EasonYi/zulip,reyha/zulip,susansls/zulip,zachallaun/zulip,hackerkid/zulip,synicalsyntax/zulip,praveenaki/zulip,yocome/zulip,isht3/zulip,firstblade/zulip,gigawhitlocks/zulip,johnnygaddarr/zulip,JPJPJPOPOP/zulip,reyha/zulip,timabbott/zulip,andersk/zulip,MayB/zulip,Juanvulcano/zulip,wangdeshui/zulip,krtkmj/zulip,Galexrt/zulip,arpith/zulip,zacps/zulip,codeKonami/zulip,DazWorrall/zulip,adnanh/zulip,andersk/zulip,samatdav/zulip,peiwei/zulip,Galexrt/zulip,ipernet/zulip,wweiradio/zulip,DazWorrall/zulip,joshisa/zulip,Gabriel0402/zulip,levixie/zulip,Galexrt/zulip,Qgap/zulip,jimmy54/zulip,dawran6/zulip,ipernet/zulip,christi3k/zulip,pradiptad/zulip,ikasumiwt/zulip,amallia/zulip,Diptanshu8/zulip,vaidap/zulip,mdavid/zulip,Juanvulcano/zulip,ericzhou2008/zulip,hengqujushi/zulip,MariaFaBella85/zulip,wdaher/zulip,tdr130/zulip,cosmicAsymmetry/zulip,j831/zulip,he15his/zulip,niftynei/zulip,ryanbackman/zulip,JPJPJPOPOP/zulip,johnny9/zulip,esander91/zulip,Diptanshu8/zulip,suxinde2009/zulip,developerfm/zulip,noroot/zulip,ryansnowboarder/zulip,m1ssou/zulip,Qgap/zulip,zorojean/zulip,MayB/zulip,mahim97/zulip,zacps/zulip,Batterfii/zulip,zwily/zulip,ashwinirudrappa/zulip,dxq-git/zulip,peiwei/zulip,tbutter/zulip,JPJPJPOPOP/zulip,zwily/zulip,hayderimran7/zulip,guiquanz/zulip,jerryge/zulip,bastianh/zulip,Frouk/zulip,kou/zulip,noroot/zulip,joshisa/zulip,ryanbackman/zulip,pradiptad/zulip,amyliu345/zulip,KJin99/zulip,brainwane/zulip,eastlhu/zulip,Drooids/zulip,ApsOps/zulip,lfranchi/zulip,so0k/zulip,kou/zulip,vabs22/zulip,rishig/zulip,stamhe/zulip,mohsenSy/zulip,yocome/zulip,Diptanshu8/zulip,DazWorrall/zulip,hj3938/zulip,lfranchi/zulip,j831/zulip,vikas-parashar/zulip,cosmicAsymmetry/zulip,lfranchi/zulip,ipernet/zulip,zwily/zulip,vabs22/zulip,proliming/zulip,Qgap/zulip,mansilladev/zulip,shrikrishnaholla/zulip,tommyip/zulip,gkotian/zulip,luyifan/zulip,brockwhittaker/zulip,ericzhou2008/zulip,mansilladev/zulip,tommyip/zulip,dattatreya303/zulip,vakila/zulip,MayB/zulip,willingc/zulip,shrikrishnaholla/zulip,xuxiao/zulip,tommyip/zulip,hafeez3000/zulip,rht/zulip,armooo/zulip,kaiyuanheshang/zulip,punchagan/zulip,kou/zulip,sonali0901/zulip,hengqujushi/zulip,wweiradio/zulip,christi3k/zulip,proliming/zulip,hj3938/zulip,Cheppers/zulip,esander91/zulip,zachallaun/zulip,wdaher/zulip,JanzTam/zulip,peguin40/zulip,aliceriot/zulip,joshisa/zulip,Batterfii/zulip,yocome/zulip,bssrdf/zulip,levixie/zulip,zofuthan/zulip,Vallher/zulip,nicholasbs/zulip,firstblade/zulip,susansls/zulip,amyliu345/zulip,jimmy54/zulip,gkotian/zulip,swinghu/zulip,technicalpickles/zulip,isht3/zulip,Cheppers/zulip,SmartPeople/zulip,jonesgithub/zulip,sup95/zulip,arpith/zulip,themass/zulip,peiwei/zulip,dhcrzf/zulip,Suninus/zulip,qq1012803704/zulip,TigorC/zulip,praveenaki/zulip,ufosky-serve
r/zulip,tbutter/zulip,willingc/zulip,moria/zulip,developerfm/zulip,umkay/zulip,natanovia/zulip,hackerkid/zulip,yocome/zulip,souravbadami/zulip,jonesgithub/zulip,rht/zulip,hustlzp/zulip,alliejones/zulip,LAndreas/zulip,EasonYi/zulip,xuxiao/zulip,AZtheAsian/zulip,nicholasbs/zulip,ashwinirudrappa/zulip,fw1121/zulip,moria/zulip,PaulPetring/zulip,dotcool/zulip,AZtheAsian/zulip,eeshangarg/zulip,wangdeshui/zulip,Galexrt/zulip,shaunstanislaus/zulip,moria/zulip,adnanh/zulip,LAndreas/zulip,paxapy/zulip,stamhe/zulip,calvinleenyc/zulip,hustlzp/zulip,bluesea/zulip,easyfmxu/zulip,suxinde2009/zulip,Gabriel0402/zulip,jrowan/zulip,wdaher/zulip,bastianh/zulip,vabs22/zulip,arpitpanwar/zulip,isht3/zulip,sharmaeklavya2/zulip,aakash-cr7/zulip,dxq-git/zulip,zorojean/zulip,tbutter/zulip,luyifan/zulip,so0k/zulip,tbutter/zulip,grave-w-grave/zulip,wangdeshui/zulip,joyhchen/zulip,glovebx/zulip,themass/zulip,ApsOps/zulip,RobotCaleb/zulip,sharmaeklavya2/zulip,xuanhan863/zulip,timabbott/zulip,amallia/zulip,gigawhitlocks/zulip,he15his/zulip,grave-w-grave/zulip,LAndreas/zulip,Batterfii/zulip,noroot/zulip,qq1012803704/zulip,mahim97/zulip,hj3938/zulip,hustlzp/zulip,jphilipsen05/zulip,jessedhillon/zulip,blaze225/zulip,luyifan/zulip,jessedhillon/zulip,praveenaki/zulip,LeeRisk/zulip,eeshangarg/zulip,tiansiyuan/zulip,KingxBanana/zulip,swinghu/zulip,xuanhan863/zulip,punchagan/zulip,developerfm/zulip,KingxBanana/zulip,glovebx/zulip,yuvipanda/zulip,littledogboy/zulip,yocome/zulip,brockwhittaker/zulip,shubhamdhama/zulip,johnnygaddarr/zulip,zachallaun/zulip,karamcnair/zulip,willingc/zulip,johnny9/zulip,rht/zulip,Drooids/zulip,jimmy54/zulip,jeffcao/zulip,andersk/zulip,dnmfarrell/zulip,tdr130/zulip,timabbott/zulip,kaiyuanheshang/zulip,bitemyapp/zulip,ikasumiwt/zulip,KingxBanana/zulip,aliceriot/zulip,huangkebo/zulip,AZtheAsian/zulip,shaunstanislaus/zulip,ryansnowboarder/zulip,jeffcao/zulip,hayderimran7/zulip,xuanhan863/zulip,blaze225/zulip,lfranchi/zulip,ikasumiwt/zulip,zulip/zulip,vabs22/zulip,showell/zulip,PhilSk/zulip,udxxabp/zulip,jackrzhang/zulip,akuseru/zulip,vakila/zulip,TigorC/zulip,noroot/zulip,krtkmj/zulip,Gabriel0402/zulip,verma-varsha/zulip,themass/zulip,firstblade/zulip,babbage/zulip
|
Add a management command to bankrupt users.
(imported from commit 58fbd08fc31a69c9ee7fb73b9302d44eb87db1fa)
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zephyr.lib.actions import update_message_flags
from zephyr.models import UserProfile, Message, get_user_profile_by_email
class Command(BaseCommand):
help = """Bankrupt one or many users.
Usage: python manage.py bankrupt_users <list of email addresses>"""
def handle(self, *args, **options):
if len(args) < 1:
print "Please provide at least one e-mail address."
exit(1)
for email in args:
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
print "e-mail %s doesn't exist in the system, skipping" % (email,)
continue
update_message_flags(user_profile, "add", "read", None, True)
messages = Message.objects.filter(
usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
old_pointer = user_profile.pointer
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save()
print "%s: %d => %d" % (email, old_pointer, new_pointer)
else:
print "%s has no messages, can't bankrupt!" % (email,)
|
<commit_before><commit_msg>Add a management command to bankrupt users.
(imported from commit 58fbd08fc31a69c9ee7fb73b9302d44eb87db1fa)<commit_after>
|
from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zephyr.lib.actions import update_message_flags
from zephyr.models import UserProfile, Message, get_user_profile_by_email
class Command(BaseCommand):
help = """Bankrupt one or many users.
Usage: python manage.py bankrupt_users <list of email addresses>"""
def handle(self, *args, **options):
if len(args) < 1:
print "Please provide at least one e-mail address."
exit(1)
for email in args:
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
print "e-mail %s doesn't exist in the system, skipping" % (email,)
continue
update_message_flags(user_profile, "add", "read", None, True)
messages = Message.objects.filter(
usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
old_pointer = user_profile.pointer
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save()
print "%s: %d => %d" % (email, old_pointer, new_pointer)
else:
print "%s has no messages, can't bankrupt!" % (email,)
|
Add a management command to bankrupt users.
(imported from commit 58fbd08fc31a69c9ee7fb73b9302d44eb87db1fa)from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zephyr.lib.actions import update_message_flags
from zephyr.models import UserProfile, Message, get_user_profile_by_email
class Command(BaseCommand):
help = """Bankrupt one or many users.
Usage: python manage.py bankrupt_users <list of email addresses>"""
def handle(self, *args, **options):
if len(args) < 1:
print "Please provide at least one e-mail address."
exit(1)
for email in args:
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
print "e-mail %s doesn't exist in the system, skipping" % (email,)
continue
update_message_flags(user_profile, "add", "read", None, True)
messages = Message.objects.filter(
usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
old_pointer = user_profile.pointer
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save()
print "%s: %d => %d" % (email, old_pointer, new_pointer)
else:
print "%s has no messages, can't bankrupt!" % (email,)
|
<commit_before><commit_msg>Add a management command to bankrupt users.
(imported from commit 58fbd08fc31a69c9ee7fb73b9302d44eb87db1fa)<commit_after>from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zephyr.lib.actions import update_message_flags
from zephyr.models import UserProfile, Message, get_user_profile_by_email
class Command(BaseCommand):
help = """Bankrupt one or many users.
Usage: python manage.py bankrupt_users <list of email addresses>"""
def handle(self, *args, **options):
if len(args) < 1:
print "Please provide at least one e-mail address."
exit(1)
for email in args:
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
print "e-mail %s doesn't exist in the system, skipping" % (email,)
continue
update_message_flags(user_profile, "add", "read", None, True)
messages = Message.objects.filter(
usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
old_pointer = user_profile.pointer
new_pointer = messages[0].id
user_profile.pointer = new_pointer
user_profile.save()
print "%s: %d => %d" % (email, old_pointer, new_pointer)
else:
print "%s has no messages, can't bankrupt!" % (email,)
|
|
ff54c4b9e7bfb79f10cb5495f6fccb2ea5abbd75
|
tests/utils.py
|
tests/utils.py
|
import logging
import ssl
import urllib2
TRIES=3
class TimeoutError(Exception):
pass
def test_3times_then_fail(func, *args, **kwargs):
log = logging.getLogger("tests")
for i in range(TRIES):
try:
return func(*args, **kwargs)
except urllib2.HTTPError, e:
if e.code == 500:
log.debug("Internal server error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Errors')
except ssl.SSLError, e:
print e.__dict__
if e.msg == "The read operation timed out":
log.debug("Timeout error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Timeouts')
|
Add test measurements against 500 errors and timeouts
|
Add test measurements against 500 errors and timeouts
|
Python
|
mit
|
authmillenon/pycomicvine
|
Add test measurements against 500 errors and timeouts
|
import logging
import ssl
import urllib2
TRIES=3
class TimeoutError(Exception):
pass
def test_3times_then_fail(func, *args, **kwargs):
log = logging.getLogger("tests")
for i in range(TRIES):
try:
return func(*args, **kwargs)
except urllib2.HTTPError, e:
if e.code == 500:
log.debug("Internal server error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Errors')
except ssl.SSLError, e:
print e.__dict__
if e.msg == "The read operation timed out":
log.debug("Timeout error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Timeouts')
|
<commit_before><commit_msg>Add test measurements against 500 errors and timeouts<commit_after>
|
import logging
import ssl
import urllib2
TRIES=3
class TimeoutError(Exception):
pass
def test_3times_then_fail(func, *args, **kwargs):
log = logging.getLogger("tests")
for i in range(TRIES):
try:
return func(*args, **kwargs)
except urllib2.HTTPError, e:
if e.code == 500:
log.debug("Internal server error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Errors')
except ssl.SSLError, e:
print e.__dict__
if e.msg == "The read operation timed out":
log.debug("Timeout error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Timeouts')
|
Add test measurements against 500 errors and timeoutsimport logging
import ssl
import urllib2
TRIES=3
class TimeoutError(Exception):
pass
def test_3times_then_fail(func, *args, **kwargs):
log = logging.getLogger("tests")
for i in range(TRIES):
try:
return func(*args, **kwargs)
except urllib2.HTTPError, e:
if e.code == 500:
log.debug("Internal server error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Errors')
except ssl.SSLError, e:
print e.__dict__
if e.msg == "The read operation timed out":
log.debug("Timeout error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Timeouts')
|
<commit_before><commit_msg>Add test measurements against 500 errors and timeouts<commit_after>import logging
import ssl
import urllib2
TRIES=3
class TimeoutError(Exception):
pass
def test_3times_then_fail(func, *args, **kwargs):
log = logging.getLogger("tests")
for i in range(TRIES):
try:
return func(*args, **kwargs)
except urllib2.HTTPError, e:
if e.code == 500:
log.debug("Internal server error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Errors')
except ssl.SSLError, e:
print e.__dict__
if e.msg == "The read operation timed out":
log.debug("Timeout error (try=%d)" % i)
if i == TRIES-1:
raise TimeoutError('Too many HTTP-Timeouts')
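A usage sketch, illustrative only: the helper wraps any flaky callable and retries it up to TRIES times before raising TimeoutError.

# Hypothetical caller: retry a request that occasionally returns 500 or times out.
import urllib2
from tests.utils import test_3times_then_fail, TimeoutError

def fetch(url):
    # Extra positional and keyword arguments are forwarded to the wrapped callable.
    return test_3times_then_fail(urllib2.urlopen, url, timeout=5)

try:
    response = fetch("https://example.com/api/issues/")  # illustrative URL
except TimeoutError:
    print("giving up after repeated failures")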
|
|
a9504258826e434172031adeaa400dbe4e02daa4
|
migrations/versions/0214_another_letter_org.py
|
migrations/versions/0214_another_letter_org.py
|
"""empty message
Revision ID: 0214_another_letter_org
Revises: 0213_brand_colour_domain_
"""
# revision identifiers, used by Alembic.
revision = '0214_another_letter_org'
down_revision = '0213_brand_colour_domain_'
from alembic import op
NEW_ORGANISATIONS = [
('510', 'Pension Wise'),
]
def upgrade():
for numeric_id, name in NEW_ORGANISATIONS:
op.execute("""
INSERT
INTO dvla_organisation
VALUES ('{}', '{}')
""".format(numeric_id, name))
def downgrade():
for numeric_id, _ in NEW_ORGANISATIONS:
op.execute("""
DELETE
FROM dvla_organisation
WHERE id = '{}'
""".format(numeric_id))
|
Add letter logo for Pension Wise
|
Add letter logo for Pension Wise
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add letter logo for Pension Wise
|
"""empty message
Revision ID: 0214_another_letter_org
Revises: 0213_brand_colour_domain_
"""
# revision identifiers, used by Alembic.
revision = '0214_another_letter_org'
down_revision = '0213_brand_colour_domain_'
from alembic import op
NEW_ORGANISATIONS = [
('510', 'Pension Wise'),
]
def upgrade():
for numeric_id, name in NEW_ORGANISATIONS:
op.execute("""
INSERT
INTO dvla_organisation
VALUES ('{}', '{}')
""".format(numeric_id, name))
def downgrade():
for numeric_id, _ in NEW_ORGANISATIONS:
op.execute("""
DELETE
FROM dvla_organisation
WHERE id = '{}'
""".format(numeric_id))
|
<commit_before><commit_msg>Add letter logo for Pension Wise<commit_after>
|
"""empty message
Revision ID: 0214_another_letter_org
Revises: 0213_brand_colour_domain_
"""
# revision identifiers, used by Alembic.
revision = '0214_another_letter_org'
down_revision = '0213_brand_colour_domain_'
from alembic import op
NEW_ORGANISATIONS = [
('510', 'Pension Wise'),
]
def upgrade():
for numeric_id, name in NEW_ORGANISATIONS:
op.execute("""
INSERT
INTO dvla_organisation
VALUES ('{}', '{}')
""".format(numeric_id, name))
def downgrade():
for numeric_id, _ in NEW_ORGANISATIONS:
op.execute("""
DELETE
FROM dvla_organisation
WHERE id = '{}'
""".format(numeric_id))
|
Add letter logo for Pension Wise"""empty message
Revision ID: 0214_another_letter_org
Revises: 0213_brand_colour_domain_
"""
# revision identifiers, used by Alembic.
revision = '0214_another_letter_org'
down_revision = '0213_brand_colour_domain_'
from alembic import op
NEW_ORGANISATIONS = [
('510', 'Pension Wise'),
]
def upgrade():
for numeric_id, name in NEW_ORGANISATIONS:
op.execute("""
INSERT
INTO dvla_organisation
VALUES ('{}', '{}')
""".format(numeric_id, name))
def downgrade():
for numeric_id, _ in NEW_ORGANISATIONS:
op.execute("""
DELETE
FROM dvla_organisation
WHERE id = '{}'
""".format(numeric_id))
|
<commit_before><commit_msg>Add letter logo for Pension Wise<commit_after>"""empty message
Revision ID: 0214_another_letter_org
Revises: 0213_brand_colour_domain_
"""
# revision identifiers, used by Alembic.
revision = '0214_another_letter_org'
down_revision = '0213_brand_colour_domain_'
from alembic import op
NEW_ORGANISATIONS = [
('510', 'Pension Wise'),
]
def upgrade():
for numeric_id, name in NEW_ORGANISATIONS:
op.execute("""
INSERT
INTO dvla_organisation
VALUES ('{}', '{}')
""".format(numeric_id, name))
def downgrade():
for numeric_id, _ in NEW_ORGANISATIONS:
op.execute("""
DELETE
FROM dvla_organisation
WHERE id = '{}'
""".format(numeric_id))
|
|
cfab12182b6556a61cdd93ce715cf863abe69bbf
|
formidable/migrations/0005_conditions_default.py
|
formidable/migrations/0005_conditions_default.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('formidable', '0004_formidable_conditions'),
]
operations = [
migrations.AlterField(
model_name='formidable',
name='conditions',
field=jsonfield.fields.JSONField(default=list),
),
]
|
Add missing django db migration
|
Add missing django db migration
|
Python
|
mit
|
novafloss/django-formidable
|
Add missing django db migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('formidable', '0004_formidable_conditions'),
]
operations = [
migrations.AlterField(
model_name='formidable',
name='conditions',
field=jsonfield.fields.JSONField(default=list),
),
]
|
<commit_before><commit_msg>Add missing django db migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('formidable', '0004_formidable_conditions'),
]
operations = [
migrations.AlterField(
model_name='formidable',
name='conditions',
field=jsonfield.fields.JSONField(default=list),
),
]
|
Add missing django db migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('formidable', '0004_formidable_conditions'),
]
operations = [
migrations.AlterField(
model_name='formidable',
name='conditions',
field=jsonfield.fields.JSONField(default=list),
),
]
|
<commit_before><commit_msg>Add missing django db migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('formidable', '0004_formidable_conditions'),
]
operations = [
migrations.AlterField(
model_name='formidable',
name='conditions',
field=jsonfield.fields.JSONField(default=list),
),
]
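Migrations that lag behind model changes can be caught automatically; on Django 1.10 or later a small test along these lines (a sketch, not part of the commit) fails whenever makemigrations would still generate something:

# Hypothetical check that the migration history matches the current models.
from django.core.management import call_command

def test_no_missing_migrations():
    # Exits non-zero (raising SystemExit) when model changes lack a migration.
    call_command('makemigrations', '--check', '--dry-run')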
|
|
36a8bcf42cc262c2adbcb67fee64d20e1cdf5c4c
|
tests/django_settings.py
|
tests/django_settings.py
|
# This is a settings file to run Django tests with django-pickling in effect
print('Installing django-pickling')
import django_pickling
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
|
Add settings file to run Django test suite with django-pickling in effect
|
Add settings file to run Django test suite with django-pickling in effect
|
Python
|
bsd-3-clause
|
Suor/django-pickling
|
Add settings file to run Django test suite with django-pickling in effect
|
# This is a settings file to run Django tests with django-pickling in effect
print('Installing django-pickling')
import django_pickling
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
|
<commit_before><commit_msg>Add settings file to run Django test suite with django-pickling in effect<commit_after>
|
# This is a settings file to run Django tests with django-pickling in effect
print('Installing django-pickling')
import django_pickling
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
|
Add settings file to run Django test suite with django-pickling in effect# This is a settings file to run Django tests with django-pickling in effect
print('Installing django-pickling')
import django_pickling
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
|
<commit_before><commit_msg>Add settings file to run Django test suite with django-pickling in effect<commit_after># This is a settings file to run Django tests with django-pickling in effect
print('Installing django-pickling')
import django_pickling
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
# Use a fast hasher to speed up tests.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
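The module is meant to be handed to Django's own test runner so the upstream suite runs with the patch active. A rough driver, with the checkout layout assumed rather than taken from the project:

# Hypothetical driver: run Django's test suite against this settings module.
# Assumes a Django source checkout and that django_settings.py is importable.
import subprocess
import sys

subprocess.check_call([
    sys.executable, 'tests/runtests.py',
    '--settings=django_settings',
])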
|
|
bae587f2d6d9b9ea3023934ad5796ff6a15fe765
|
nstl/passes/nameresolve.py
|
nstl/passes/nameresolve.py
|
from .. import ast
import sys
from itertools import chain
class Scope(dict):
def __init__(self, parent=None, name=None, *args, **kwargs):
assert isinstance(parent, Scope) or parent is None
super().__init__(*args, **kwargs)
self.name = name
self.parent = parent
self.scopes = dict()
if parent is not None and self.name is not None:
self.parent.scopes.update({self.name:self})
def get_outer_scope(self, name):
"""Return the nearest reachable scope with the corresponding name.
"""
try:
return self.scopes[name]
except KeyError:
if self.parent is None:
raise NameError("scope {} is not reachable".format(name))
return self.parent.get_outer_scope(name)
def __contains__(self, name):
"""Return whether a name is reachable from the current scope.
"""
return (super().__contains__(name) or
any(name in scope for scope in self.parents()))
def __getitem__(self, name):
"""Return the object binding to a name, if the name is in scope.
"""
try:
return super().__getitem__(name)
except KeyError:
if self.parent is None:
raise NameError("name {} is not in scope".format(name))
return self.parent.__getitem__(name)
def __setitem__(self, *args, **kwargs):
"""Bind a name to an object inside the current scope.
"""
return super().__setitem__(*args, **kwargs)
def parents(self):
parent = self.parent
while parent is not None:
yield parent
parent = parent.parent
def show(self, buf=sys.stdout):
lead = ''
for scope in reversed(list(chain([self], self.parents()))):
for name, binding in scope.items():
buf.write(lead + str(name) + " : " + str(binding) + "\n")
lead = lead + ' ' * 4
class NameResolver(ast.NodeVisitor):
def visit_Namespace(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope, node.name.value)
self.generic_visit(node, node.scope)
def visit_Template(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope)
self.generic_visit(node, node.scope)
def visit_Identifier(self, node, current_scope):
if node.value not in current_scope:
raise NameError("unresolved reference {}".format(node.value))
node.resolved = current_scope[node.value]
def visit_QualifiedIdentifier(self, node, current_scope):
outer, *rest = node.quals
scope = current_scope.get_outer_scope(outer.value)
for qual in rest:
scope = getattr(scope, qual.value)
self.visit(node.name, scope)
node.resolved = node.name.resolved
|
Rewrite the name resolving pass.
|
Rewrite the name resolving pass.
This pass takes care of resolving which object are identifiers bound to.
It then stores the result by adding a 'resolved' attribute to each identifier
and qualified identifier node in the tree. These 'resolved' attributes can then
be accessed to refer to the object bound to the identifier, which is
invaluable during code generation.
|
Python
|
bsd-3-clause
|
ldionne/nstl-lang,ldionne/nstl-lang
|
Rewrite the name resolving pass.
This pass takes care of resolving which object are identifiers bound to.
It then stores the result by adding a 'resolved' attribute to each identifier
and qualified identifier node in the tree. These 'resolved' attributes can then
be accessed to refer to the object bound to the identifier, which is
invaluable during code generation.
|
from .. import ast
import sys
from itertools import chain
class Scope(dict):
def __init__(self, parent=None, name=None, *args, **kwargs):
assert isinstance(parent, Scope) or parent is None
super().__init__(*args, **kwargs)
self.name = name
self.parent = parent
self.scopes = dict()
if parent is not None and self.name is not None:
self.parent.scopes.update({self.name:self})
def get_outer_scope(self, name):
"""Return the nearest reachable scope with the corresponding name.
"""
try:
return self.scopes[name]
except KeyError:
if self.parent is None:
raise NameError("scope {} is not reachable".format(name))
return self.parent.get_outer_scope(name)
def __contains__(self, name):
"""Return whether a name is reachable from the current scope.
"""
return (super().__contains__(name) or
any(name in scope for scope in self.parents()))
def __getitem__(self, name):
"""Return the object binding to a name, if the name is in scope.
"""
try:
return super().__getitem__(name)
except KeyError:
if self.parent is None:
raise NameError("name {} is not in scope".format(name))
return self.parent.__getitem__(name)
def __setitem__(self, *args, **kwargs):
"""Bind a name to an object inside the current scope.
"""
return super().__setitem__(*args, **kwargs)
def parents(self):
parent = self.parent
while parent is not None:
yield parent
parent = parent.parent
def show(self, buf=sys.stdout):
lead = ''
for scope in reversed(list(chain([self], self.parents()))):
for name, binding in scope.items():
buf.write(lead + str(name) + " : " + str(binding) + "\n")
lead = lead + ' ' * 4
class NameResolver(ast.NodeVisitor):
def visit_Namespace(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope, node.name.value)
self.generic_visit(node, node.scope)
def visit_Template(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope)
self.generic_visit(node, node.scope)
def visit_Identifier(self, node, current_scope):
if node.value not in current_scope:
raise NameError("unresolved reference {}".format(node.value))
node.resolved = current_scope[node.value]
def visit_QualifiedIdentifier(self, node, current_scope):
outer, *rest = node.quals
scope = current_scope.get_outer_scope(outer.value)
for qual in rest:
scope = getattr(scope, qual.value)
self.visit(node.name, scope)
node.resolved = node.name.resolved
|
<commit_before><commit_msg>Rewrite the name resolving pass.
This pass takes care of resolving which object are identifiers bound to.
It then stores the result by adding a 'resolved' attribute to each identifier
and qualified identifier node in the tree. These 'resolved' attributes can then
be accessed to refer to the object bound to the identifier, which is
invaluable during code generation.<commit_after>
|
from .. import ast
import sys
from itertools import chain
class Scope(dict):
def __init__(self, parent=None, name=None, *args, **kwargs):
assert isinstance(parent, Scope) or parent is None
super().__init__(*args, **kwargs)
self.name = name
self.parent = parent
self.scopes = dict()
if parent is not None and self.name is not None:
self.parent.scopes.update({self.name:self})
def get_outer_scope(self, name):
"""Return the nearest reachable scope with the corresponding name.
"""
try:
return self.scopes[name]
except KeyError:
if self.parent is None:
raise NameError("scope {} is not reachable".format(name))
return self.parent.get_outer_scope(name)
def __contains__(self, name):
"""Return whether a name is reachable from the current scope.
"""
return (super().__contains__(name) or
any(name in scope for scope in self.parents()))
def __getitem__(self, name):
"""Return the object binding to a name, if the name is in scope.
"""
try:
return super().__getitem__(name)
except KeyError:
if self.parent is None:
raise NameError("name {} is not in scope".format(name))
return self.parent.__getitem__(name)
def __setitem__(self, *args, **kwargs):
"""Bind a name to an object inside the current scope.
"""
return super().__setitem__(*args, **kwargs)
def parents(self):
parent = self.parent
while parent is not None:
yield parent
parent = parent.parent
def show(self, buf=sys.stdout):
lead = ''
for scope in reversed(list(chain([self], self.parents()))):
for name, binding in scope.items():
buf.write(lead + str(name) + " : " + str(binding) + "\n")
lead = lead + ' ' * 4
class NameResolver(ast.NodeVisitor):
def visit_Namespace(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope, node.name.value)
self.generic_visit(node, node.scope)
def visit_Template(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope)
self.generic_visit(node, node.scope)
def visit_Identifier(self, node, current_scope):
if node.value not in current_scope:
raise NameError("unresolved reference {}".format(node.value))
node.resolved = current_scope[node.value]
def visit_QualifiedIdentifier(self, node, current_scope):
outer, *rest = node.quals
scope = current_scope.get_outer_scope(outer.value)
for qual in rest:
scope = getattr(scope, qual.value)
self.visit(node.name, scope)
node.resolved = node.name.resolved
|
Rewrite the name resolving pass.
This pass takes care of resolving which object are identifiers bound to.
It then stores the result by adding a 'resolved' attribute to each identifier
and qualified identifier node in the tree. These 'resolved' attributes can then
be accessed to refer to the object bound to the identifier, which is
invaluable during code generation.from .. import ast
import sys
from itertools import chain
class Scope(dict):
def __init__(self, parent=None, name=None, *args, **kwargs):
assert isinstance(parent, Scope) or parent is None
super().__init__(*args, **kwargs)
self.name = name
self.parent = parent
self.scopes = dict()
if parent is not None and self.name is not None:
self.parent.scopes.update({self.name:self})
def get_outer_scope(self, name):
"""Return the nearest reachable scope with the corresponding name.
"""
try:
return self.scopes[name]
except KeyError:
if self.parent is None:
raise NameError("scope {} is not reachable".format(name))
return self.parent.get_outer_scope(name)
def __contains__(self, name):
"""Return whether a name is reachable from the current scope.
"""
return (super().__contains__(name) or
any(name in scope for scope in self.parents()))
def __getitem__(self, name):
"""Return the object binding to a name, if the name is in scope.
"""
try:
return super().__getitem__(name)
except KeyError:
if self.parent is None:
raise NameError("name {} is not in scope".format(name))
return self.parent.__getitem__(name)
def __setitem__(self, *args, **kwargs):
"""Bind a name to an object inside the current scope.
"""
return super().__setitem__(*args, **kwargs)
def parents(self):
parent = self.parent
while parent is not None:
yield parent
parent = parent.parent
def show(self, buf=sys.stdout):
lead = ''
for scope in reversed(list(chain([self], self.parents()))):
for name, binding in scope.items():
buf.write(lead + str(name) + " : " + str(binding) + "\n")
lead = lead + ' ' * 4
class NameResolver(ast.NodeVisitor):
def visit_Namespace(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope, node.name.value)
self.generic_visit(node, node.scope)
def visit_Template(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope)
self.generic_visit(node, node.scope)
def visit_Identifier(self, node, current_scope):
if node.value not in current_scope:
raise NameError("unresolved reference {}".format(node.value))
node.resolved = current_scope[node.value]
def visit_QualifiedIdentifier(self, node, current_scope):
outer, *rest = node.quals
scope = current_scope.get_outer_scope(outer.value)
for qual in rest:
scope = getattr(scope, qual.value)
self.visit(node.name, scope)
node.resolved = node.name.resolved
|
<commit_before><commit_msg>Rewrite the name resolving pass.
This pass takes care of resolving which object are identifiers bound to.
It then stores the result by adding a 'resolved' attribute to each identifier
and qualified identifier node in the tree. These 'resolved' attributes can then
be accessed to refer to the object bound to the identifier, which is
invaluable during code generation.<commit_after>from .. import ast
import sys
from itertools import chain
class Scope(dict):
def __init__(self, parent=None, name=None, *args, **kwargs):
assert isinstance(parent, Scope) or parent is None
super().__init__(*args, **kwargs)
self.name = name
self.parent = parent
self.scopes = dict()
if parent is not None and self.name is not None:
self.parent.scopes.update({self.name:self})
def get_outer_scope(self, name):
"""Return the nearest reachable scope with the corresponding name.
"""
try:
return self.scopes[name]
except KeyError:
if self.parent is None:
raise NameError("scope {} is not reachable".format(name))
return self.parent.get_outer_scope(name)
def __contains__(self, name):
"""Return whether a name is reachable from the current scope.
"""
return (super().__contains__(name) or
any(name in scope for scope in self.parents()))
def __getitem__(self, name):
"""Return the object binding to a name, if the name is in scope.
"""
try:
return super().__getitem__(name)
except KeyError:
if self.parent is None:
raise NameError("name {} is not in scope".format(name))
return self.parent.__getitem__(name)
def __setitem__(self, *args, **kwargs):
"""Bind a name to an object inside the current scope.
"""
return super().__setitem__(*args, **kwargs)
def parents(self):
parent = self.parent
while parent is not None:
yield parent
parent = parent.parent
def show(self, buf=sys.stdout):
lead = ''
for scope in reversed(list(chain([self], self.parents()))):
for name, binding in scope.items():
buf.write(lead + str(name) + " : " + str(binding) + "\n")
lead = lead + ' ' * 4
class NameResolver(ast.NodeVisitor):
def visit_Namespace(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope, node.name.value)
self.generic_visit(node, node.scope)
def visit_Template(self, node, current_scope=Scope()):
current_scope[node.name.value] = node
node.scope = Scope(current_scope)
self.generic_visit(node, node.scope)
def visit_Identifier(self, node, current_scope):
if node.value not in current_scope:
raise NameError("unresolved reference {}".format(node.value))
node.resolved = current_scope[node.value]
def visit_QualifiedIdentifier(self, node, current_scope):
outer, *rest = node.quals
scope = current_scope.get_outer_scope(outer.value)
for qual in rest:
scope = getattr(scope, qual.value)
self.visit(node.name, scope)
node.resolved = node.name.resolved
|
|
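A short driver sketch may help show how the pass above is meant to be used; the parse() entry point and the example source are assumptions, not part of the commit.
# Hypothetical driver for the NameResolver pass above (sketch only).
tree = parse("namespace outer { template T { } }")  # parse() is an assumed front-end helper
NameResolver().visit(tree)                           # starts from the default root Scope()
# After the pass, every Identifier / QualifiedIdentifier node carries a
# 'resolved' attribute pointing at the Namespace or Template it binds to.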
7a04808fa00340d23a99436db741dc7e87d0f010
|
tickets/wagtail_hooks.py
|
tickets/wagtail_hooks.py
|
from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from tickets import models
class QuestionAdmin(ModelAdmin):
model = models.Question
menu_icon = 'help'
list_display = ("subject", "author", "status")
modeladmin_register(QuestionAdmin)
|
Move tickets admin to wagtail
|
Move tickets admin to wagtail
TODO: adding responses should be supported
touch #162
|
Python
|
agpl-3.0
|
Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
|
Move tickets admin to wagtail
TODO: adding responses should be supported
touch #162
|
from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from tickets import models
class QuestionAdmin(ModelAdmin):
model = models.Question
menu_icon = 'help'
list_display = ("subject", "author", "status")
modeladmin_register(QuestionAdmin)
|
<commit_before><commit_msg>Move tickets admin to wagtail
TODO: adding responses should be supported
touch #162<commit_after>
|
from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from tickets import models
class QuestionAdmin(ModelAdmin):
model = models.Question
menu_icon = 'help'
list_display = ("subject", "author", "status")
modeladmin_register(QuestionAdmin)
|
Move tickets admin to wagtail
TODO: adding responses should be supported
touch #162from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from tickets import models
class QuestionAdmin(ModelAdmin):
model = models.Question
menu_icon = 'help'
list_display = ("subject", "author", "status")
modeladmin_register(QuestionAdmin)
|
<commit_before><commit_msg>Move tickets admin to wagtail
TODO: adding responses should be supported
touch #162<commit_after>from wagtail.contrib.modeladmin.options import ModelAdmin, modeladmin_register
from tickets import models
class QuestionAdmin(ModelAdmin):
model = models.Question
menu_icon = 'help'
list_display = ("subject", "author", "status")
modeladmin_register(QuestionAdmin)
|
|
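If more ticket-related admins are added later, Wagtail's ModelAdminGroup can collect them under one menu; the group below is only a sketch (QuestionAdmin comes from the commit above, the rest is assumed).
# Sketch: grouping ticket admins in the Wagtail menu (hypothetical, not in the commit).
from wagtail.contrib.modeladmin.options import ModelAdminGroup, modeladmin_register
class TicketsAdminGroup(ModelAdminGroup):
    menu_label = 'Tickets'
    menu_icon = 'help'
    items = (QuestionAdmin,)
# modeladmin_register(TicketsAdminGroup)  # would replace the direct registration above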
9bd57f75977655ddf6eb57729eda41832db2ba8c
|
scripts/add_alpha/add_alpha.py
|
scripts/add_alpha/add_alpha.py
|
#!/usr/bin/python3
# coding: utf-8
'''
Adding an alpha pixel to an image and writing as an ARGB image file
Copyright (C) 2018 Zettsu Tatsuya
'''
import cv2
from optparse import OptionParser
import numpy as np
def main():
parser = OptionParser()
parser.add_option('-i', '--input', dest='in_image_filename', default='in.png',
help='Input image filename')
parser.add_option('-o', '--output', dest='out_image_filename', default='out.png',
help='Output image filename')
(options, args) = parser.parse_args()
in_img = cv2.imread(options.in_image_filename)
shape = in_img.shape
b_channel, g_channel, r_channel = cv2.split(in_img)
alpha_channel = np.ones((shape[0], shape[1], 1), dtype=b_channel.dtype) * 255
# Makes the right-bottom pixel transparent
alpha_channel[shape[0]-1, shape[1]-1] = 0
out_img = cv2.merge((b_channel, g_channel, r_channel, alpha_channel))
cv2.imwrite(options.out_image_filename, out_img)
if __name__ == "__main__":
main()
|
Add a script to make a transparent PNG file
|
Add a script to make a transparent PNG file
|
Python
|
mit
|
zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend
|
Add a script to make a transparent PNG file
|
#!/usr/bin/python3
# coding: utf-8
'''
Adding an alpha pixel to an image and writing as an ARGB image file
Copyright (C) 2018 Zettsu Tatsuya
'''
import cv2
from optparse import OptionParser
import numpy as np
def main():
parser = OptionParser()
parser.add_option('-i', '--input', dest='in_image_filename', default='in.png',
help='Input image filename')
parser.add_option('-o', '--output', dest='out_image_filename', default='out.png',
help='Output image filename')
(options, args) = parser.parse_args()
in_img = cv2.imread(options.in_image_filename)
shape = in_img.shape
b_channel, g_channel, r_channel = cv2.split(in_img)
alpha_channel = np.ones((shape[0], shape[1], 1), dtype=b_channel.dtype) * 255
# Makes the right-bottom pixel transparent
alpha_channel[shape[0]-1, shape[1]-1] = 0
out_img = cv2.merge((b_channel, g_channel, r_channel, alpha_channel))
cv2.imwrite(options.out_image_filename, out_img)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add a script to make a transparent PNG file<commit_after>
|
#!/usr/bin/python3
# coding: utf-8
'''
Adding an alpha pixel to an image and writing as an ARGB image file
Copyright (C) 2018 Zettsu Tatsuya
'''
import cv2
from optparse import OptionParser
import numpy as np
def main():
parser = OptionParser()
parser.add_option('-i', '--input', dest='in_image_filename', default='in.png',
help='Input image filename')
parser.add_option('-o', '--output', dest='out_image_filename', default='out.png',
help='Output image filename')
(options, args) = parser.parse_args()
in_img = cv2.imread(options.in_image_filename)
shape = in_img.shape
b_channel, g_channel, r_channel = cv2.split(in_img)
alpha_channel = np.ones((shape[0], shape[1], 1), dtype=b_channel.dtype) * 255
# Makes the right-bottom pixel transparent
alpha_channel[shape[0]-1, shape[1]-1] = 0
out_img = cv2.merge((b_channel, g_channel, r_channel, alpha_channel))
cv2.imwrite(options.out_image_filename, out_img)
if __name__ == "__main__":
main()
|
Add a script to make a transparent PNG file#!/usr/bin/python3
# coding: utf-8
'''
Adding an alpha pixel to an image and writing as an ARGB image file
Copyright (C) 2018 Zettsu Tatsuya
'''
import cv2
from optparse import OptionParser
import numpy as np
def main():
parser = OptionParser()
parser.add_option('-i', '--input', dest='in_image_filename', default='in.png',
help='Input image filename')
parser.add_option('-o', '--output', dest='out_image_filename', default='out.png',
help='Output image filename')
(options, args) = parser.parse_args()
in_img = cv2.imread(options.in_image_filename)
shape = in_img.shape
b_channel, g_channel, r_channel = cv2.split(in_img)
alpha_channel = np.ones((shape[0], shape[1], 1), dtype=b_channel.dtype) * 255
# Makes the right-bottom pixel transparent
alpha_channel[shape[0]-1, shape[1]-1] = 0
out_img = cv2.merge((b_channel, g_channel, r_channel, alpha_channel))
cv2.imwrite(options.out_image_filename, out_img)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add a script to make a transparent PNG file<commit_after>#!/usr/bin/python3
# coding: utf-8
'''
Adding an alpha pixel to an image and writing as an ARGB image file
Copyright (C) 2018 Zettsu Tatsuya
'''
import cv2
from optparse import OptionParser
import numpy as np
def main():
parser = OptionParser()
parser.add_option('-i', '--input', dest='in_image_filename', default='in.png',
help='Input image filename')
parser.add_option('-o', '--output', dest='out_image_filename', default='out.png',
help='Output image filename')
(options, args) = parser.parse_args()
in_img = cv2.imread(options.in_image_filename)
shape = in_img.shape
b_channel, g_channel, r_channel = cv2.split(in_img)
alpha_channel = np.ones((shape[0], shape[1], 1), dtype=b_channel.dtype) * 255
# Makes the right-bottom pixel transparent
alpha_channel[shape[0]-1, shape[1]-1] = 0
out_img = cv2.merge((b_channel, g_channel, r_channel, alpha_channel))
cv2.imwrite(options.out_image_filename, out_img)
if __name__ == "__main__":
main()
|
|
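A quick read-back makes it easy to confirm that the alpha channel survived the write; the file name below is a placeholder matching the script's default output.
# Sketch: verify the output of add_alpha.py keeps its alpha channel.
import cv2
img = cv2.imread('out.png', cv2.IMREAD_UNCHANGED)  # IMREAD_UNCHANGED preserves alpha
print(img.shape)      # expected (height, width, 4)
print(img[-1, -1])    # right-bottom pixel; its alpha component should be 0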
9141e9e859bab8999e785f796d4613d66a765ba9
|
mangaki/mangaki/migrations/0013_auto_20150616_0919.py
|
mangaki/mangaki/migrations/0013_auto_20150616_0919.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0012_auto_20150616_0832'),
]
operations = [
migrations.AddField(
model_name='anime',
name='anime_type',
field=models.TextField(max_length=42, default=''),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='author',
field=models.ForeignKey(default=1, to='mangaki.Artist', related_name='authored'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='editor',
field=models.ForeignKey(default=1, to='mangaki.Editor'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='genre',
field=models.ManyToManyField(to='mangaki.Genre'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='nb_episodes',
field=models.TextField(max_length=16, default='Inconnu'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='origin',
field=models.CharField(max_length=10, default='', choices=[('japon', 'Japon'), ('coree', 'Coree'), ('france', 'France'), ('chine', 'Chine'), ('usa', 'USA'), ('allemagne', 'Allemagne'), ('taiwan', 'Taiwan'), ('espagne', 'Espagne'), ('angleterre', 'Angleterre'), ('hong-kong', 'Hong Kong'), ('italie', 'Italie'), ('inconnue', 'Inconnue'), ('intl', 'International')]),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='studio',
field=models.ForeignKey(default=1, to='mangaki.Studio'),
preserve_default=True,
),
]
|
Add attributes to Anime migration
|
Add attributes to Anime migration
|
Python
|
agpl-3.0
|
RaitoBezarius/mangaki,Elarnon/mangaki,Mako-kun/mangaki,Elarnon/mangaki,Mako-kun/mangaki,Mako-kun/mangaki,Elarnon/mangaki,RaitoBezarius/mangaki,RaitoBezarius/mangaki
|
Add attributes to Anime migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0012_auto_20150616_0832'),
]
operations = [
migrations.AddField(
model_name='anime',
name='anime_type',
field=models.TextField(max_length=42, default=''),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='author',
field=models.ForeignKey(default=1, to='mangaki.Artist', related_name='authored'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='editor',
field=models.ForeignKey(default=1, to='mangaki.Editor'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='genre',
field=models.ManyToManyField(to='mangaki.Genre'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='nb_episodes',
field=models.TextField(max_length=16, default='Inconnu'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='origin',
field=models.CharField(max_length=10, default='', choices=[('japon', 'Japon'), ('coree', 'Coree'), ('france', 'France'), ('chine', 'Chine'), ('usa', 'USA'), ('allemagne', 'Allemagne'), ('taiwan', 'Taiwan'), ('espagne', 'Espagne'), ('angleterre', 'Angleterre'), ('hong-kong', 'Hong Kong'), ('italie', 'Italie'), ('inconnue', 'Inconnue'), ('intl', 'International')]),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='studio',
field=models.ForeignKey(default=1, to='mangaki.Studio'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add attributes to Anime migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0012_auto_20150616_0832'),
]
operations = [
migrations.AddField(
model_name='anime',
name='anime_type',
field=models.TextField(max_length=42, default=''),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='author',
field=models.ForeignKey(default=1, to='mangaki.Artist', related_name='authored'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='editor',
field=models.ForeignKey(default=1, to='mangaki.Editor'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='genre',
field=models.ManyToManyField(to='mangaki.Genre'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='nb_episodes',
field=models.TextField(max_length=16, default='Inconnu'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='origin',
field=models.CharField(max_length=10, default='', choices=[('japon', 'Japon'), ('coree', 'Coree'), ('france', 'France'), ('chine', 'Chine'), ('usa', 'USA'), ('allemagne', 'Allemagne'), ('taiwan', 'Taiwan'), ('espagne', 'Espagne'), ('angleterre', 'Angleterre'), ('hong-kong', 'Hong Kong'), ('italie', 'Italie'), ('inconnue', 'Inconnue'), ('intl', 'International')]),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='studio',
field=models.ForeignKey(default=1, to='mangaki.Studio'),
preserve_default=True,
),
]
|
Add attributes to Anime migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0012_auto_20150616_0832'),
]
operations = [
migrations.AddField(
model_name='anime',
name='anime_type',
field=models.TextField(max_length=42, default=''),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='author',
field=models.ForeignKey(default=1, to='mangaki.Artist', related_name='authored'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='editor',
field=models.ForeignKey(default=1, to='mangaki.Editor'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='genre',
field=models.ManyToManyField(to='mangaki.Genre'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='nb_episodes',
field=models.TextField(max_length=16, default='Inconnu'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='origin',
field=models.CharField(max_length=10, default='', choices=[('japon', 'Japon'), ('coree', 'Coree'), ('france', 'France'), ('chine', 'Chine'), ('usa', 'USA'), ('allemagne', 'Allemagne'), ('taiwan', 'Taiwan'), ('espagne', 'Espagne'), ('angleterre', 'Angleterre'), ('hong-kong', 'Hong Kong'), ('italie', 'Italie'), ('inconnue', 'Inconnue'), ('intl', 'International')]),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='studio',
field=models.ForeignKey(default=1, to='mangaki.Studio'),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add attributes to Anime migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0012_auto_20150616_0832'),
]
operations = [
migrations.AddField(
model_name='anime',
name='anime_type',
field=models.TextField(max_length=42, default=''),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='author',
field=models.ForeignKey(default=1, to='mangaki.Artist', related_name='authored'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='editor',
field=models.ForeignKey(default=1, to='mangaki.Editor'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='genre',
field=models.ManyToManyField(to='mangaki.Genre'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='nb_episodes',
field=models.TextField(max_length=16, default='Inconnu'),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='origin',
field=models.CharField(max_length=10, default='', choices=[('japon', 'Japon'), ('coree', 'Coree'), ('france', 'France'), ('chine', 'Chine'), ('usa', 'USA'), ('allemagne', 'Allemagne'), ('taiwan', 'Taiwan'), ('espagne', 'Espagne'), ('angleterre', 'Angleterre'), ('hong-kong', 'Hong Kong'), ('italie', 'Italie'), ('inconnue', 'Inconnue'), ('intl', 'International')]),
preserve_default=True,
),
migrations.AddField(
model_name='anime',
name='studio',
field=models.ForeignKey(default=1, to='mangaki.Studio'),
preserve_default=True,
),
]
|
|
00c0e3b1309416186123fe81f298f9422446d5fa
|
testfixtures/seating.py
|
testfixtures/seating.py
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
Add function to create a seat group test fixture
|
Add function to create a seat group test fixture
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
Add function to create a seat group test fixture
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
<commit_before><commit_msg>Add function to create a seat group test fixture<commit_after>
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
Add function to create a seat group test fixture# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
<commit_before><commit_msg>Add function to create a seat group test fixture<commit_after># -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
|
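In a test the fixture would be called roughly like this; the party and seat category objects, and the model's attribute names, are assumptions for illustration.
# Sketch: using the seat group fixture in a test (surrounding objects are assumed).
group = create_seat_group(party.id, category, 'Premium block', seat_quantity=6)
assert group.title == 'Premium block'   # attribute names assumed from the constructor
assert group.seat_quantity == 6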
b77af260bfdfbb7d93a488d9bc78d343a9d0b0ea
|
pipeline/writer.py
|
pipeline/writer.py
|
from pipeline import *
import json
import os
class JsonWriter(BasePipeline):
def __init__(self, outputfolder, basefilename=None, filesize=10000):
"""
when attached to the pipeline this writer logs every document as JSON
:param outputfolder: folder to save output files in
:param basefilename: filename prefix to add before all file names
:param filesize: number of documents to buffer before writing a file
"""
self.outputfolder = outputfolder
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
self.basefilename = basefilename
self.filesize = filesize
self.counter = 0
self.buffer = []
def run(self, document):
self.counter += 1
self.buffer.append(document.toJSON())
print len(self.buffer)
if self.counter % self.filesize == 0:
self.flush()
return document
def flush(self):
filename = "%s-%s.json" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
json.dump(self.buffer, outfile)
print "Saved file %s" % filename
self.buffer = []
|
Implement JSONWriter Class for Saving into files
|
Implement JSONWriter Class for Saving into files
- base file name
- save every
- output folder
- buffer documents and save when reach maximum file size
|
Python
|
mit
|
hadyelsahar/RE-NLG-Dataset,hadyelsahar/RE-NLG-Dataset
|
Implement JSONWriter Class for Saving into files
- base file name
- save every
- output folder
- buffer documents and save when reach maximum file size
|
from pipeline import *
import json
import os
class JsonWriter(BasePipeline):
def __init__(self, outputfolder, basefilename=None, filesize=10000):
"""
when attached to the pipeline this writer logs every document as JSON
:param outputfolder: folder to save output files in
:param basefilename: filename prefix to add before all file names
:param filesize: number of documents to buffer before writing a file
"""
self.outputfolder = outputfolder
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
self.basefilename = basefilename
self.filesize = filesize
self.counter = 0
self.buffer = []
def run(self, document):
self.counter += 1
self.buffer.append(document.toJSON())
print len(self.buffer)
if self.counter % self.filesize == 0:
self.flush()
return document
def flush(self):
filename = "%s-%s.json" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
json.dump(self.buffer, outfile)
print "Saved file %s" % filename
self.buffer = []
|
<commit_before><commit_msg>Implement JSONWriter Class for Saving into files
- base file name
- save every
- output folder
- buffer documents and save when reach maximum file size<commit_after>
|
from pipeline import *
import json
import os
class JsonWriter(BasePipeline):
def __init__(self, outputfolder, basefilename=None, filesize=10000):
"""
when attached to the pipeline this writer logs every document as JSON
:param outputfolder: folder to save output files in
:param basefilename: filename prefix to add before all file names
:param filesize: number of documents to buffer before writing a file
"""
self.outputfolder = outputfolder
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
self.basefilename = basefilename
self.filesize = filesize
self.counter = 0
self.buffer = []
def run(self, document):
self.counter += 1
self.buffer.append(document.toJSON())
print len(self.buffer)
if self.counter % self.filesize == 0:
self.flush()
return document
def flush(self):
filename = "%s-%s.json" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
json.dump(self.buffer, outfile)
print "Saved file %s" % filename
self.buffer = []
|
Implement JSONWriter Class for Saving into files
- base file name
- save every
- output folder
- buffer documents and save when reach maximum file sizefrom pipeline import *
import json
import os
class JsonWriter(BasePipeline):
def __init__(self, outputfolder, basefilename=None, filesize=10000):
"""
when attached to the pipeline this writer logs every document as JSON
:param outputfolder: folder to save output files in
:param basefilename: filename prefix to add before all file names
:param filesize: number of documents to buffer before writing a file
"""
self.outputfolder = outputfolder
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
self.basefilename = basefilename
self.filesize = filesize
self.counter = 0
self.buffer = []
def run(self, document):
self.counter += 1
self.buffer.append(document.toJSON())
print len(self.buffer)
if self.counter % self.filesize == 0:
self.flush()
return document
def flush(self):
filename = "%s-%s.json" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
json.dump(self.buffer, outfile)
print "Saved file %s" % filename
self.buffer = []
|
<commit_before><commit_msg>Implement JSONWriter Class for Saving into files
- base file name
- save every
- output folder
- buffer documents and save when reach maximum file size<commit_after>from pipeline import *
import json
import os
class JsonWriter(BasePipeline):
def __init__(self, outputfolder, basefilename=None, filesize=10000):
"""
when attached to the pipeline this writer logs every document as JSON
:param outputfolder: folder to save output files in
:param basefilename: filename prefix to add before all file names
:param filesize: number of documents to buffer before writing a file
"""
self.outputfolder = outputfolder
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
self.basefilename = basefilename
self.filesize = filesize
self.counter = 0
self.buffer = []
def run(self, document):
self.counter += 1
self.buffer.append(document.toJSON())
print len(self.buffer)
if self.counter % self.filesize == 0:
self.flush()
return document
def flush(self):
filename = "%s-%s.json" % (self.counter-self.filesize, self.counter)
filename = "%s_%s" % (self.basefilename, filename) if self.basefilename is not None else filename
filename = os.path.join(self.outputfolder, filename)
with open(filename, 'w') as outfile:
json.dump(self.buffer, outfile)
print "Saved file %s" % filename
self.buffer = []
|
|
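A minimal sketch of how the writer is driven, assuming an iterable of documents exposing the toJSON() method used above; every name except JsonWriter is a placeholder.
# Sketch: wiring JsonWriter into a pipeline run.
writer = JsonWriter('out/', basefilename='dump', filesize=10000)
for doc in documents:   # any iterable of documents with a toJSON() method
    writer.run(doc)
writer.flush()          # write whatever is still buffered at the end of the run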
719b2ac28e27f8e8b0d0acea315c355e7a34cd25
|
cerbero/commands/genvsprops.py
|
cerbero/commands/genvsprops.py
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.commands import Command, register_command
from cerbero.ide.common import PkgConfig
from cerbero.ide.vs.pkgconfig2vsprops import PkgConfig2VSProps, CommonVSProps
from cerbero.utils import _, N_, ArgparseArgument
from cerbero.utils import messages as m
DEFAULT_PREFIX_MACRO = 'CERBERO_SDK_ROOT'
class GenVSProps(Command):
doc = N_('Generate Visual Studio property sheets to use the SDK from VS')
name = 'genvsprops'
def __init__(self):
Command.__init__(self,
[ArgparseArgument('-o', '--output_dir', default='.',
help=_('output directory where .vsprops files will be saved')),
])
def run(self, config, args):
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
for pc in PkgConfig.list_all():
p2v = PkgConfig2VSProps(pc, config.prefix, '$(%s)' %
DEFAULT_PREFIX_MACRO, False)
p2v.create(args.output_dir)
m.action('Created %s.vsprops' % pc)
common = CommonVSProps(config.prefix, DEFAULT_PREFIX_MACRO)
common.create(args.output_dir)
m.message('Property sheet files were successfully created in %s' %
os.path.abspath(args.output_dir))
register_command(GenVSProps)
|
Add command to create VS property sheets for all pkgconfig packages
|
Add command to create VS property sheets for all pkgconfig packages
|
Python
|
lgpl-2.1
|
nzjrs/cerbero,centricular/cerbero,justinjoy/cerbero,multipath-rtp/cerbero,superdump/cerbero,cee1/cerbero-mac,ylatuya/cerbero,lubosz/cerbero,ford-prefect/cerbero,nzjrs/cerbero,fluendo/cerbero,brion/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,GStreamer/cerbero,AlertMe/cerbero,ylatuya/cerbero,sdroege/cerbero,lubosz/cerbero,centricular/cerbero,atsushieno/cerbero,ramaxlo/cerbero,cee1/cerbero-mac,sdroege/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,EricssonResearch/cerbero,nzjrs/cerbero,ikonst/cerbero,jackjansen/cerbero-2013,superdump/cerbero,GStreamer/cerbero,atsushieno/cerbero,shoreflyer/cerbero,davibe/cerbero,fluendo/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,BigBrother-International/gst-cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,OptoFidelity/cerbero,ramaxlo/cerbero,brion/cerbero,flexVDI/cerbero,AlertMe/cerbero,ylatuya/cerbero,OptoFidelity/cerbero,ford-prefect/cerbero,BigBrother-International/gst-cerbero,fluendo/cerbero,jackjansen/cerbero-2013,jackjansen/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,multipath-rtp/cerbero,OptoFidelity/cerbero,atsushieno/cerbero,nirbheek/cerbero-old,nirbheek/cerbero,jackjansen/cerbero,flexVDI/cerbero,ford-prefect/cerbero,ramaxlo/cerbero,flexVDI/cerbero,nirbheek/cerbero,nicolewu/cerbero,AlertMe/cerbero,multipath-rtp/cerbero,jackjansen/cerbero,superdump/cerbero,nzjrs/cerbero,lubosz/cerbero,shoreflyer/cerbero,davibe/cerbero,jackjansen/cerbero-2013,shoreflyer/cerbero,sdroege/cerbero,brion/cerbero,nirbheek/cerbero,davibe/cerbero,freedesktop-unofficial-mirror/gstreamer__cerbero,BigBrother-International/gst-cerbero,justinjoy/cerbero,centricular/cerbero,nirbheek/cerbero,jackjansen/cerbero,nirbheek/cerbero-old,shoreflyer/cerbero,ramaxlo/cerbero,ylatuya/cerbero,EricssonResearch/cerbero,atsushieno/cerbero,cee1/cerbero-mac,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,nicolewu/cerbero,ikonst/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,EricssonResearch/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,nirbheek/cerbero-old,flexVDI/cerbero,lubosz/cerbero,OptoFidelity/cerbero,GStreamer/cerbero,freedesktop-unofficial-mirror/gstreamer__cerbero,AlertMe/cerbero,BigBrother-International/gst-cerbero,freedesktop-unofficial-mirror/gstreamer__cerbero,atsushieno/cerbero,superdump/cerbero,BigBrother-International/gst-cerbero,centricular/cerbero,GStreamer/cerbero,flexVDI/cerbero,jackjansen/cerbero-2013,sdroege/cerbero,justinjoy/cerbero,jackjansen/cerbero-2013,nzjrs/cerbero,centricular/cerbero,nirbheek/cerbero-old,brion/cerbero,ramaxlo/cerbero,ikonst/cerbero,EricssonResearch/cerbero,multipath-rtp/cerbero,multipath-rtp/cerbero,GStreamer/cerbero,EricssonResearch/cerbero,cee1/cerbero-mac,brion/cerbero,ford-prefect/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,davibe/cerbero,sdroege/cerbero,ikonst/cerbero,nicolewu/cerbero,freedesktop-unofficial-mirror/gstreamer__cerbero,AlertMe/cerbero,ikonst/cerbero,justinjoy/cerbero,fluendo/cerbero,shoreflyer/cerbero,fluendo/cerbero
|
Add command to create VS property sheets for all pkgconfig packages
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.commands import Command, register_command
from cerbero.ide.common import PkgConfig
from cerbero.ide.vs.pkgconfig2vsprops import PkgConfig2VSProps, CommonVSProps
from cerbero.utils import _, N_, ArgparseArgument
from cerbero.utils import messages as m
DEFAULT_PREFIX_MACRO = 'CERBERO_SDK_ROOT'
class GenVSProps(Command):
doc = N_('Generate Visual Studio property sheets to use the SDK from VS')
name = 'genvsprops'
def __init__(self):
Command.__init__(self,
[ArgparseArgument('-o', '--output_dir', default='.',
help=_('output directory where .vsprops files will be saved')),
])
def run(self, config, args):
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
for pc in PkgConfig.list_all():
p2v = PkgConfig2VSProps(pc, config.prefix, '$(%s)' %
DEFAULT_PREFIX_MACRO, False)
p2v.create(args.output_dir)
m.action('Created %s.vsprops' % pc)
common = CommonVSProps(config.prefix, DEFAULT_PREFIX_MACRO)
common.create(args.output_dir)
m.message('Property sheet files were successfully created in %s' %
os.path.abspath(args.output_dir))
register_command(GenVSProps)
|
<commit_before><commit_msg>Add command to create VS property sheets for all pkgconfig packages<commit_after>
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.commands import Command, register_command
from cerbero.ide.common import PkgConfig
from cerbero.ide.vs.pkgconfig2vsprops import PkgConfig2VSProps, CommonVSProps
from cerbero.utils import _, N_, ArgparseArgument
from cerbero.utils import messages as m
DEFAULT_PREFIX_MACRO = 'CERBERO_SDK_ROOT'
class GenVSProps(Command):
doc = N_('Generate Visual Studio property sheets to use the SDK from VS')
name = 'genvsprops'
def __init__(self):
Command.__init__(self,
[ArgparseArgument('-o', '--output_dir', default='.',
help=_('output directory where .vsprops files will be saved')),
])
def run(self, config, args):
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
for pc in PkgConfig.list_all():
p2v = PkgConfig2VSProps(pc, config.prefix, '$(%s)' %
DEFAULT_PREFIX_MACRO, False)
p2v.create(args.output_dir)
m.action('Created %s.vsprops' % pc)
common = CommonVSProps(config.prefix, DEFAULT_PREFIX_MACRO)
common.create(args.output_dir)
m.message('Property sheet files were successfully created in %s' %
os.path.abspath(args.output_dir))
register_command(GenVSProps)
|
Add command to create VS property sheets for all pkgconfig packages# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.commands import Command, register_command
from cerbero.ide.common import PkgConfig
from cerbero.ide.vs.pkgconfig2vsprops import PkgConfig2VSProps, CommonVSProps
from cerbero.utils import _, N_, ArgparseArgument
from cerbero.utils import messages as m
DEFAULT_PREFIX_MACRO = 'CERBERO_SDK_ROOT'
class GenVSProps(Command):
doc = N_('Generate Visual Studio property sheets to use the SDK from VS')
name = 'genvsprops'
def __init__(self):
Command.__init__(self,
[ArgparseArgument('-o', '--output_dir', default='.',
help=_('output directory where .vsprops files will be saved')),
])
def run(self, config, args):
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
for pc in PkgConfig.list_all():
p2v = PkgConfig2VSProps(pc, config.prefix, '$(%s)' %
DEFAULT_PREFIX_MACRO, False)
p2v.create(args.output_dir)
m.action('Created %s.vsprops' % pc)
common = CommonVSProps(config.prefix, DEFAULT_PREFIX_MACRO)
common.create(args.output_dir)
m.message('Property sheet files were successfully created in %s' %
os.path.abspath(args.output_dir))
register_command(GenVSProps)
|
<commit_before><commit_msg>Add command to create VS property sheets for all pkgconfig packages<commit_after># cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.commands import Command, register_command
from cerbero.ide.common import PkgConfig
from cerbero.ide.vs.pkgconfig2vsprops import PkgConfig2VSProps, CommonVSProps
from cerbero.utils import _, N_, ArgparseArgument
from cerbero.utils import messages as m
DEFAULT_PREFIX_MACRO = 'CERBERO_SDK_ROOT'
class GenVSProps(Command):
doc = N_('Generate Visual Studio property sheets to use the SDK from VS')
name = 'genvsprops'
def __init__(self):
Command.__init__(self,
[ArgparseArgument('-o', '--output_dir', default='.',
help=_('output directory where .vsprops files will be saved')),
])
def run(self, config, args):
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
for pc in PkgConfig.list_all():
p2v = PkgConfig2VSProps(pc, config.prefix, '$(%s)' %
DEFAULT_PREFIX_MACRO, False)
p2v.create(args.output_dir)
m.action('Created %s.vsprops' % pc)
common = CommonVSProps(config.prefix, DEFAULT_PREFIX_MACRO)
common.create(args.output_dir)
m.message('Property sheet files were successfully created in %s' %
os.path.abspath(args.output_dir))
register_command(GenVSProps)
|
|
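Judging from the argument definition, the command would be invoked as something like `cerbero genvsprops -o <dir>`. PkgConfig.list_all() is cerbero's own wrapper; outside cerbero the package list it iterates can be approximated with pkg-config directly, as in this rough sketch.
# Rough stand-in for PkgConfig.list_all() using pkg-config itself (sketch only).
import subprocess
out = subprocess.check_output(['pkg-config', '--list-all']).decode()
packages = [line.split()[0] for line in out.splitlines() if line.strip()]
print(packages[:5])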
13b351616719ca3a6ba2fa08f1c8446b38aeb755
|
hooks/hook-PyQt5.QtGui.py
|
hooks/hook-PyQt5.QtGui.py
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
hiddenimports = ['sip', 'PyQt5.QtCore']
from PyInstaller.utils.hooks import qt5_plugins_binaries
from PyInstaller.compat import is_linux
binaries = []
binaries.extend(qt5_plugins_binaries('accessible'))
binaries.extend(qt5_plugins_binaries('iconengines'))
binaries.extend(qt5_plugins_binaries('imageformats'))
binaries.extend(qt5_plugins_binaries('inputmethods'))
binaries.extend(qt5_plugins_binaries('graphicssystems'))
binaries.extend(qt5_plugins_binaries('platforms'))
if is_linux:
binaries.extend(qt5_plugins_binaries('platformthemes'))
|
Add hook for missing platformthemes on linux
|
Add hook for missing platformthemes on linux
|
Python
|
mit
|
ucoin-io/cutecoin,ucoin-io/cutecoin,ucoin-io/cutecoin
|
Add hook for missing platformthemes on linux
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
hiddenimports = ['sip', 'PyQt5.QtCore']
from PyInstaller.utils.hooks import qt5_plugins_binaries
from PyInstaller.compat import is_linux
binaries = []
binaries.extend(qt5_plugins_binaries('accessible'))
binaries.extend(qt5_plugins_binaries('iconengines'))
binaries.extend(qt5_plugins_binaries('imageformats'))
binaries.extend(qt5_plugins_binaries('inputmethods'))
binaries.extend(qt5_plugins_binaries('graphicssystems'))
binaries.extend(qt5_plugins_binaries('platforms'))
if is_linux:
binaries.extend(qt5_plugins_binaries('platformthemes'))
|
<commit_before><commit_msg>Add hook for missing platformthemes on linux<commit_after>
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
hiddenimports = ['sip', 'PyQt5.QtCore']
from PyInstaller.utils.hooks import qt5_plugins_binaries
from PyInstaller.compat import is_linux
binaries = []
binaries.extend(qt5_plugins_binaries('accessible'))
binaries.extend(qt5_plugins_binaries('iconengines'))
binaries.extend(qt5_plugins_binaries('imageformats'))
binaries.extend(qt5_plugins_binaries('inputmethods'))
binaries.extend(qt5_plugins_binaries('graphicssystems'))
binaries.extend(qt5_plugins_binaries('platforms'))
if is_linux:
binaries.extend(qt5_plugins_binaries('platformthemes'))
|
Add hook for missing platformthemes on linux#-----------------------------------------------------------------------------
# Copyright (c) 2013-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
hiddenimports = ['sip', 'PyQt5.QtCore']
from PyInstaller.utils.hooks import qt5_plugins_binaries
from PyInstaller.compat import is_linux
binaries = []
binaries.extend(qt5_plugins_binaries('accessible'))
binaries.extend(qt5_plugins_binaries('iconengines'))
binaries.extend(qt5_plugins_binaries('imageformats'))
binaries.extend(qt5_plugins_binaries('inputmethods'))
binaries.extend(qt5_plugins_binaries('graphicssystems'))
binaries.extend(qt5_plugins_binaries('platforms'))
if is_linux:
binaries.extend(qt5_plugins_binaries('platformthemes'))
|
<commit_before><commit_msg>Add hook for missing platformthemes on linux<commit_after>#-----------------------------------------------------------------------------
# Copyright (c) 2013-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
hiddenimports = ['sip', 'PyQt5.QtCore']
from PyInstaller.utils.hooks import qt5_plugins_binaries
from PyInstaller.compat import is_linux
binaries = []
binaries.extend(qt5_plugins_binaries('accessible'))
binaries.extend(qt5_plugins_binaries('iconengines'))
binaries.extend(qt5_plugins_binaries('imageformats'))
binaries.extend(qt5_plugins_binaries('inputmethods'))
binaries.extend(qt5_plugins_binaries('graphicssystems'))
binaries.extend(qt5_plugins_binaries('platforms'))
if is_linux:
binaries.extend(qt5_plugins_binaries('platformthemes'))
|
|
95007220fbe4c5554a92b2b688fb9724ec949ebd
|
tests/rules/test_dry.py
|
tests/rules/test_dry.py
|
import pytest
from thefuck.rules.dry import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='cd cd foo'),
Command(script='git git push origin/master')])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command('cd cd foo'), 'cd foo'),
(Command('git git push origin/master'), 'git push origin/master')])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
Add a test for the DRY rule
|
Add a test for the DRY rule
|
Python
|
mit
|
mlk/thefuck,PLNech/thefuck,qrqiuren/thefuck,barneyElDinosaurio/thefuck,Clpsplug/thefuck,SimenB/thefuck,ostree/thefuck,PLNech/thefuck,BertieJim/thefuck,mcarton/thefuck,roth1002/thefuck,nvbn/thefuck,ytjiang/thefuck,Aeron/thefuck,scorphus/thefuck,hxddh/thefuck,thesoulkiller/thefuck,SimenB/thefuck,subajat1/thefuck,vanita5/thefuck,suxinde2009/thefuck,hxddh/thefuck,princeofdarkness76/thefuck,manashmndl/thefuck,beni55/thefuck,nvbn/thefuck,lawrencebenson/thefuck,bigplus/thefuck,mbbill/thefuck,LawrenceHan/thefuck,nwinkler/thefuck,princeofdarkness76/thefuck,zhangzhishan/thefuck,beni55/thefuck,Clpsplug/thefuck,NguyenHoaiNam/thefuck,levythu/thefuck,thinkerchan/thefuck,vanita5/thefuck,gaurav9991/thefuck,manashmndl/thefuck,BertieJim/thefuck,qingying5810/thefuck,thesoulkiller/thefuck,scorphus/thefuck,LawrenceHan/thefuck,AntonChankin/thefuck,roth1002/thefuck,lawrencebenson/thefuck,ostree/thefuck,MJerty/thefuck,MJerty/thefuck,sekaiamber/thefuck,thinkerchan/thefuck,AntonChankin/thefuck,mcarton/thefuck,bigplus/thefuck,redreamality/thefuck,levythu/thefuck,subajat1/thefuck,bugaevc/thefuck,petr-tichy/thefuck,mlk/thefuck,artiya4u/thefuck,redreamality/thefuck,barneyElDinosaurio/thefuck,gogobebe2/thefuck
|
Add a test for the DRY rule
|
import pytest
from thefuck.rules.dry import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='cd cd foo'),
Command(script='git git push origin/master')])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command('cd cd foo'), 'cd foo'),
(Command('git git push origin/master'), 'git push origin/master')])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
<commit_before><commit_msg>Add a test for the DRY rule<commit_after>
|
import pytest
from thefuck.rules.dry import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='cd cd foo'),
Command(script='git git push origin/master')])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command('cd cd foo'), 'cd foo'),
(Command('git git push origin/master'), 'git push origin/master')])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
Add a test for the DRY ruleimport pytest
from thefuck.rules.dry import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='cd cd foo'),
Command(script='git git push origin/master')])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command('cd cd foo'), 'cd foo'),
(Command('git git push origin/master'), 'git push origin/master')])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
<commit_before><commit_msg>Add a test for the DRY rule<commit_after>import pytest
from thefuck.rules.dry import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='cd cd foo'),
Command(script='git git push origin/master')])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command('cd cd foo'), 'cd foo'),
(Command('git git push origin/master'), 'git push origin/master')])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
|
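The dry rule itself is not part of this record; an implementation consistent with these expectations might look roughly like the sketch below (not thefuck's actual code).
# Sketch of a dry rule that satisfies the tests above; assumed, not the project's code.
def match(command, settings):
    parts = command.script.split()
    return len(parts) >= 2 and parts[0] == parts[1]
def get_new_command(command, settings):
    return command.script[command.script.find(' ') + 1:]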
a6e48f4c27fd934e22833b3823d1a049a85bbd8d
|
comics/comics/abstrusegoose.py
|
comics/comics/abstrusegoose.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Abstruse Goose'
language = 'en'
url = 'http://www.abstrusegoose.com/'
start_date = '2008-02-01'
rights = 'lcfr, CC BY-NC 3.0 US'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
def crawl(self, pub_date):
feed = self.parse_feed('http://abstrusegoose.com/feed/atom')
for entry in feed.for_date(pub_date):
url = entry.content0.src('img[src*="/strips/"]')
title = entry.title
text = entry.content0.title('img[src*="/strips/"]')
return CrawlerResult(url, title, text)
|
Add crawler for Abstruse Goose
|
Add crawler for Abstruse Goose
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,klette/comics,datagutten/comics,datagutten/comics,jodal/comics,klette/comics,datagutten/comics,jodal/comics,jodal/comics,klette/comics
|
Add crawler for Abstruse Goose
|
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Abstruse Goose'
language = 'en'
url = 'http://www.abstrusegoose.com/'
start_date = '2008-02-01'
rights = 'lcfr, CC BY-NC 3.0 US'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
def crawl(self, pub_date):
feed = self.parse_feed('http://abstrusegoose.com/feed/atom')
for entry in feed.for_date(pub_date):
url = entry.content0.src('img[src*="/strips/"]')
title = entry.title
text = entry.content0.title('img[src*="/strips/"]')
return CrawlerResult(url, title, text)
|
<commit_before><commit_msg>Add crawler for Abstruse Goose<commit_after>
|
from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Abstruse Goose'
language = 'en'
url = 'http://www.abstrusegoose.com/'
start_date = '2008-02-01'
rights = 'lcfr, CC BY-NC 3.0 US'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
def crawl(self, pub_date):
feed = self.parse_feed('http://abstrusegoose.com/feed/atom')
for entry in feed.for_date(pub_date):
url = entry.content0.src('img[src*="/strips/"]')
title = entry.title
text = entry.content0.title('img[src*="/strips/"]')
return CrawlerResult(url, title, text)
|
Add crawler for Abstruse Goosefrom comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Abstruse Goose'
language = 'en'
url = 'http://www.abstrusegoose.com/'
start_date = '2008-02-01'
rights = 'lcfr, CC BY-NC 3.0 US'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
def crawl(self, pub_date):
feed = self.parse_feed('http://abstrusegoose.com/feed/atom')
for entry in feed.for_date(pub_date):
url = entry.content0.src('img[src*="/strips/"]')
title = entry.title
text = entry.content0.title('img[src*="/strips/"]')
return CrawlerResult(url, title, text)
|
<commit_before><commit_msg>Add crawler for Abstruse Goose<commit_after>from comics.aggregator.crawler import CrawlerBase, CrawlerResult
from comics.meta.base import MetaBase
class Meta(MetaBase):
name = 'Abstruse Goose'
language = 'en'
url = 'http://www.abstrusegoose.com/'
start_date = '2008-02-01'
rights = 'lcfr, CC BY-NC 3.0 US'
class Crawler(CrawlerBase):
history_capable_days = 10
schedule = 'Mo,We,Fr'
def crawl(self, pub_date):
feed = self.parse_feed('http://abstrusegoose.com/feed/atom')
for entry in feed.for_date(pub_date):
url = entry.content0.src('img[src*="/strips/"]')
title = entry.title
text = entry.content0.title('img[src*="/strips/"]')
return CrawlerResult(url, title, text)
|
|
5c64cd356aeff7c8996222143fc53e7e9b442134
|
cpm_data/migrations/0014_remove_jury_duplicates.py
|
cpm_data/migrations/0014_remove_jury_duplicates.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_duplicate_jury_members(apps, schema_editor):
jury_names = [
"Irina Demyanova",
"Carin Bräck",
"Pierre-Luc Vaillancourt",
]
JuryMember = apps.get_model('cpm_data.JuryMember')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
for name in jury_names:
jury_members = JuryMember.objects.filter(name_en=name).order_by('id')
original = jury_members[0]
duplicates = jury_members[1:]
for duplicate in duplicates:
qs = SeasonJuryMember.objects.filter(jury_member=duplicate)
qs.update(jury_member=original)
duplicate.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0013_update_jury_pavel_ivanov'),
]
operations = [
migrations.RunPython(
remove_duplicate_jury_members,
lambda apps, schema_editor: None
)
]
|
Add cpm_data migration getting rid of known jury duplicates
|
Add cpm_data migration getting rid of known jury duplicates
|
Python
|
unlicense
|
kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by
|
Add cpm_data migration getting rid of known jury duplicates
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_duplicate_jury_members(apps, schema_editor):
jury_names = [
"Irina Demyanova",
"Carin Bräck",
"Pierre-Luc Vaillancourt",
]
JuryMember = apps.get_model('cpm_data.JuryMember')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
for name in jury_names:
jury_members = JuryMember.objects.filter(name_en=name).order_by('id')
original = jury_members[0]
duplicates = jury_members[1:]
for duplicate in duplicates:
qs = SeasonJuryMember.objects.filter(jury_member=duplicate)
qs.update(jury_member=original)
duplicate.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0013_update_jury_pavel_ivanov'),
]
operations = [
migrations.RunPython(
remove_duplicate_jury_members,
lambda apps, schema_editor: None
)
]
|
<commit_before><commit_msg>Add cpm_data migration getting rid of known jury duplicates<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_duplicate_jury_members(apps, schema_editor):
jury_names = [
"Irina Demyanova",
"Carin Bräck",
"Pierre-Luc Vaillancourt",
]
JuryMember = apps.get_model('cpm_data.JuryMember')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
for name in jury_names:
jury_members = JuryMember.objects.filter(name_en=name).order_by('id')
original = jury_members[0]
duplicates = jury_members[1:]
for duplicate in duplicates:
qs = SeasonJuryMember.objects.filter(jury_member=duplicate)
qs.update(jury_member=original)
duplicate.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0013_update_jury_pavel_ivanov'),
]
operations = [
migrations.RunPython(
remove_duplicate_jury_members,
lambda apps, schema_editor: None
)
]
|
Add cpm_data migration getting rid of known jury duplicates# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_duplicate_jury_members(apps, schema_editor):
jury_names = [
"Irina Demyanova",
"Carin Bräck",
"Pierre-Luc Vaillancourt",
]
JuryMember = apps.get_model('cpm_data.JuryMember')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
for name in jury_names:
jury_members = JuryMember.objects.filter(name_en=name).order_by('id')
original = jury_members[0]
duplicates = jury_members[1:]
for duplicate in duplicates:
qs = SeasonJuryMember.objects.filter(jury_member=duplicate)
qs.update(jury_member=original)
duplicate.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0013_update_jury_pavel_ivanov'),
]
operations = [
migrations.RunPython(
remove_duplicate_jury_members,
lambda apps, schema_editor: None
)
]
|
<commit_before><commit_msg>Add cpm_data migration getting rid of known jury duplicates<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_duplicate_jury_members(apps, schema_editor):
jury_names = [
"Irina Demyanova",
"Carin Bräck",
"Pierre-Luc Vaillancourt",
]
JuryMember = apps.get_model('cpm_data.JuryMember')
SeasonJuryMember = apps.get_model('cpm_data.SeasonRelatedJuryMember')
for name in jury_names:
jury_members = JuryMember.objects.filter(name_en=name).order_by('id')
original = jury_members[0]
duplicates = jury_members[1:]
for duplicate in duplicates:
qs = SeasonJuryMember.objects.filter(jury_member=duplicate)
qs.update(jury_member=original)
duplicate.delete()
class Migration(migrations.Migration):
dependencies = [
('cpm_data', '0013_update_jury_pavel_ivanov'),
]
operations = [
migrations.RunPython(
remove_duplicate_jury_members,
lambda apps, schema_editor: None
)
]
|
|
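The reverse no-op lambda can also be expressed with Django's built-in helper, assuming Django 1.8 or newer; only the operations list changes.
# Equivalent declaration using the built-in no-op reverse (sketch).
operations = [
    migrations.RunPython(remove_duplicate_jury_members, migrations.RunPython.noop),
]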
20ed558baf9f28a96e6f3c1e58b30e5af4705018
|
viewer.py
|
viewer.py
|
import models
from optparse import OptionParser
def show_topicmodel_topic():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
number = raw_input("Please Input the number : ")
try:
number = int(number)
except:
print "ERROR"
continue
if number==-1:
break
res = m.find_nearest_word(number)
res = res[:30]
print "==================================="
for word, cnt in res:
print word, cnt
print "==================================="
def show_topicmodel_word():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
word = raw_input("Please Input the word : ")
if word=='EXIT':
break
res = m.find_nearest_topic(word)[:30]
print "===================================="
for topic, cnt in res:
print topic, cnt
print "===================================="
def show_twe():
print "This is TWE"
function_map = {'Topic_topic':show_topicmodel_topic,
'Topic_word': show_topicmodel_word,
'TWE' : show_twe}
if __name__=='__main__':
#initialize the optparse
parser = OptionParser()
parser.add_option("-m","--model",dest ='model',help='Please enter the model name: Topic_word,\
Topic_topic, TWE')
(options, args) = parser.parse_args()
model = options.model
if model not in function_map:
print "Error, Please enter the correct model name"
else:
func = function_map[model]
func()
|
Add the Knn Viewer of Topic Model
|
Add the Knn Viewer of Topic Model
|
Python
|
mit
|
largelymfs/w2vtools,largelymfs/w2vtools,largelymfs/w2vtools,largelymfs/w2vtools,largelymfs/w2vtools
|
Add the Knn Viewer of Topic Model
|
import models
from optparse import OptionParser
def show_topicmodel_topic():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
number = raw_input("Please Input the number : ")
try:
number = int(number)
except:
print "ERROR"
continue
if number==-1:
break
res = m.find_nearest_word(number)
res = res[:30]
print "==================================="
for word, cnt in res:
print word, cnt
print "==================================="
def show_topicmodel_word():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
word = raw_input("Please Input the word : ")
if word=='EXIT':
break
res = m.find_nearest_topic(word)[:30]
print "===================================="
for topic, cnt in res:
print topic, cnt
print "===================================="
def show_twe():
print "This is TWE"
function_map = {'Topic_topic':show_topicmodel_topic,
'Topic_word': show_topicmodel_word,
'TWE' : show_twe}
if __name__=='__main__':
#initialize the optparse
parser = OptionParser()
parser.add_option("-m","--model",dest ='model',help='Please enter the model name: Topic_word,\
Topic_topic, TWE')
(options, args) = parser.parse_args()
model = options.model
if model not in function_map:
print "Error, Please enter the correct model name"
else:
func = function_map[model]
func()
|
<commit_before><commit_msg>Add the Knn Viewer of Topic Model<commit_after>
|
import models
from optparse import OptionParser
def show_topicmodel_topic():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
number = raw_input("Please Input the number : ")
try:
number = int(number)
except:
print "ERROR"
continue
if number==-1:
break
res = m.find_nearest_word(number)
res = res[:30]
print "==================================="
for word, cnt in res:
print word, cnt
print "==================================="
def show_topicmodel_word():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
word = raw_input("Please Input the word : ")
if word=='EXIT':
break
res = m.find_nearest_topic(word)[:30]
print "===================================="
for topic, cnt in res:
print topic, cnt
print "===================================="
def show_twe():
print "This is TWE"
function_map = {'Topic_topic':show_topicmodel_topic,
'Topic_word': show_topicmodel_word,
'TWE' : show_twe}
if __name__=='__main__':
#initialize the optparse
parser = OptionParser()
parser.add_option("-m","--model",dest ='model',help='Please enter the model name: Topic_word,\
Topic_topic, TWE')
(options, args) = parser.parse_args()
model = options.model
if model not in function_map:
print "Error, Please enter the correct model name"
else:
func = function_map[model]
func()
|
Add the Knn Viewer of Topic Modelimport models
from optparse import OptionParser
def show_topicmodel_topic():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
number = raw_input("Please Input the number : ")
try:
number = int(number)
except:
print "ERROR"
continue
if number==-1:
break
res = m.find_nearest_word(number)
res = res[:30]
print "==================================="
for word, cnt in res:
print word, cnt
print "==================================="
def show_topicmodel_word():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
word = raw_input("Please Input the word : ")
if word=='EXIT':
break
res = m.find_nearest_topic(word)[:30]
print "===================================="
for topic, cnt in res:
print topic, cnt
print "===================================="
def show_twe():
print "This is TWE"
function_map = {'Topic_topic':show_topicmodel_topic,
'Topic_word': show_topicmodel_word,
'TWE' : show_twe}
if __name__=='__main__':
#initialize the optparse
parser = OptionParser()
parser.add_option("-m","--model",dest ='model',help='Please enter the model name: Topic_word,\
Topic_topic, TWE')
(options, args) = parser.parse_args()
model = options.model
if model not in function_map:
print "Error, Please enter the correct model name"
else:
func = function_map[model]
func()
|
<commit_before><commit_msg>Add the Knn Viewer of Topic Model<commit_after>import models
from optparse import OptionParser
def show_topicmodel_topic():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
number = raw_input("Please Input the number : ")
try:
number = int(number)
except:
print "ERROR"
continue
if number==-1:
break
res = m.find_nearest_word(number)
res = res[:30]
print "==================================="
for word, cnt in res:
print word, cnt
print "==================================="
def show_topicmodel_word():
modelfile = "./../mvtwe/Dataset/model-final.tassign"
wordmapfile = "./../mvtwe/Dataset/wordmap.txt"
m = models.TopicModel(wordmapfile, modelfile)
while True:
word = raw_input("Please Input the word : ")
if word=='EXIT':
break
res = m.find_nearest_topic(word)[:30]
print "===================================="
for topic, cnt in res:
print topic, cnt
print "===================================="
def show_twe():
print "This is TWE"
function_map = {'Topic_topic':show_topicmodel_topic,
'Topic_word': show_topicmodel_word,
'TWE' : show_twe}
if __name__=='__main__':
#initialize the optparse
parser = OptionParser()
parser.add_option("-m","--model",dest ='model',help='Please enter the model name: Topic_word,\
Topic_topic, TWE')
(options, args) = parser.parse_args()
model = options.model
if model not in function_map:
print "Error, Please enter the correct model name"
else:
func = function_map[model]
func()
|
|
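viewer.py dispatches on a single --model flag through the function_map dict. If the viewers were ported to Python 3 (optparse is deprecated there and print becomes a function), the option handling could be done with argparse roughly as sketched below; the stub functions stand in for the real viewers and are only illustrative.

import argparse

# stand-ins for the dispatch dict viewer.py builds
# ({'Topic_topic': ..., 'Topic_word': ..., 'TWE': ...})
function_map = {'Topic_topic': lambda: print("Topic_topic viewer"),
                'Topic_word': lambda: print("Topic_word viewer"),
                'TWE': lambda: print("This is TWE")}

def main():
    parser = argparse.ArgumentParser(description="KNN viewer for topic models")
    parser.add_argument("-m", "--model", required=True, choices=sorted(function_map),
                        help="which viewer to run")
    args = parser.parse_args()
    function_map[args.model]()  # argparse has already rejected unknown model names

if __name__ == "__main__":
    main()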
142f14dd5cdf68d56216a44f4687f2f61d26a05a
|
erpnext/patches/v7_0/update_missing_employee_in_timesheet.py
|
erpnext/patches/v7_0/update_missing_employee_in_timesheet.py
|
from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
|
from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log") and "employee" in frappe.db.get_table_columns("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
|
Migrate employee field to timesheet only if it exists in time log
|
Migrate employee field to timesheet only if it exists in time log
|
Python
|
agpl-3.0
|
indictranstech/erpnext,njmube/erpnext,indictranstech/erpnext,indictranstech/erpnext,Aptitudetech/ERPNext,gsnbng/erpnext,indictranstech/erpnext,njmube/erpnext,geekroot/erpnext,gsnbng/erpnext,gsnbng/erpnext,geekroot/erpnext,njmube/erpnext,geekroot/erpnext,gsnbng/erpnext,njmube/erpnext,geekroot/erpnext
|
from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
Migrate employee field to timesheet only if it exists in time log
|
from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log") and "employee" in frappe.db.get_table_columns("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
|
<commit_before>from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
<commit_msg>Migrate employee field to timesheet only if it exists in time log<commit_after>
|
from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log") and "employee" in frappe.db.get_table_columns("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
|
from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
Migrate employee field to timesheet only if it exists in time logfrom __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log") and "employee" in frappe.db.get_table_columns("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
|
<commit_before>from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
<commit_msg>Migrate employee field to timesheet only if it exists in time log<commit_after>from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log") and "employee" in frappe.db.get_table_columns("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
|
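The fix guards the patch with both table_exists and get_table_columns so it is skipped on installs that never had the legacy column. That guard is reusable; a small sketch using only the two frappe calls shown in the patch (the helper name is illustrative, not part of the frappe API):

import frappe

def legacy_column_exists(table, column):
    """True only if the legacy table is still present and still has the column."""
    return (frappe.db.table_exists(table)
            and column in frappe.db.get_table_columns(table))

# guard a data patch the same way the fix above does:
# if not legacy_column_exists("Time Log", "employee"):
#     return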
9b7108cea9179adfc13f1eccebf716a3ce3275ba
|
map_reader.py
|
map_reader.py
|
from math import floor, cos, sin
map_file = open("map.txt", 'r')
map_array = [[0 for x in range(800)] for x in range(533)]
linenum = 0
for line in map_file:
processed_line = line.split()
processed_line = [int(x) for x in processed_line]
map_array[linenum] = processed_line
linenum = linenum + 1
def getNearbyWalls(x,y,phi):
#map - 5 px = 1sm
#robot diameter 5.5 sm., so we need 2.75*5 = 14 px radius
ret_front = [] # [front1sm,front2sm,front3sm,front4sm,front5sm]
ret_left = [] # [left1sm,left2sm,left3sm,left4sm,left5sm]
ret_right = [] # [right1sm,right2sm,right3sm,right4sm,right5sm]
#See if we have wall 1,2,3,4,5 sm in front of us
for i in range(1,6):
katety = int(floor(cos(phi)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_front.append(1)
else:
ret_front.append(0)
#See if we have wall 1,2,3,4,5 sm right of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_right.append(1)
else:
ret_right.append(0)
#See if we have wall 1,2,3,4,5 sm left of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_left.append(1)
else:
ret_left.append(0)
return (ret_front, ret_right, ret_left)
|
Read map and detect walls based on distance
|
Read map and detect walls based on distance
|
Python
|
bsd-3-clause
|
XapaJIaMnu/iar,XapaJIaMnu/iar
|
Read map and detect walls based on distance
|
from math import floor, cos, sin
map_file = open("map.txt", 'r')
map_array = [[0 for x in range(800)] for x in range(533)]
linenum = 0
for line in map_file:
processed_line = line.split()
processed_line = [int(x) for x in processed_line]
map_array[linenum] = processed_line
linenum = linenum + 1
def getNearbyWalls(x,y,phi):
#map - 5 px = 1sm
#robot diameter 5.5 sm., so we need 2.75*5 = 14 px radius
ret_front = [] # [front1sm,front2sm,front3sm,front4sm,front5sm]
ret_left = [] # [left1sm,left2sm,left3sm,left4sm,left5sm]
ret_right = [] # [right1sm,right2sm,right3sm,right4sm,right5sm]
#See if we have wall 1,2,3,4,5 sm in front of us
for i in range(1,6):
katety = int(floor(cos(phi)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_front.append(1)
else:
ret_front.append(0)
#See if we have wall 1,2,3,4,5 sm right of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_right.append(1)
else:
ret_right.append(0)
#See if we have wall 1,2,3,4,5 sm left of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_left.append(1)
else:
ret_left.append(0)
return (ret_front, ret_right, ret_left)
|
<commit_before><commit_msg>Read map and detect walls based on distance<commit_after>
|
from math import floor, cos, sin
map_file = open("map.txt", 'r')
map_array = [[0 for x in range(800)] for x in range(533)]
linenum = 0
for line in map_file:
processed_line = line.split()
processed_line = [int(x) for x in processed_line]
map_array[linenum] = processed_line
linenum = linenum + 1
def getNearbyWalls(x,y,phi):
#map - 5 px = 1sm
#robot diameter 5.5 sm., so we need 2.75*5 = 14 px radius
ret_front = [] # [front1sm,front2sm,front3sm,front4sm,front5sm]
ret_left = [] # [left1sm,left2sm,left3sm,left4sm,left5sm]
ret_right = [] # [right1sm,right2sm,right3sm,right4sm,right5sm]
#See if we have wall 1,2,3,4,5 sm in front of us
for i in range(1,6):
katety = int(floor(cos(phi)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_front.append(1)
else:
ret_front.append(0)
#See if we have wall 1,2,3,4,5 sm right of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_right.append(1)
else:
ret_right.append(0)
#See if we have wall 1,2,3,4,5 sm left of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_left.append(1)
else:
ret_left.append(0)
return (ret_front, ret_right, ret_left)
|
Read map and detect walls based on distancefrom math import floor, cos, sin
map_file = open("map.txt", 'r')
map_array = [[0 for x in range(800)] for x in range(533)]
linenum = 0
for line in map_file:
processed_line = line.split()
processed_line = [int(x) for x in processed_line]
map_array[linenum] = processed_line
linenum = linenum + 1
def getNearbyWalls(x,y,phi):
#map - 5 px = 1sm
#robot diameter 5.5 sm., so we need 2.75*5 = 14 px radius
ret_front = [] # [front1sm,front2sm,front3sm,front4sm,front5sm]
ret_left = [] # [left1sm,left2sm,left3sm,left4sm,left5sm]
ret_right = [] # [right1sm,right2sm,right3sm,right4sm,right5sm]
#See if we have wall 1,2,3,4,5 sm in front of us
for i in range(1,6):
katety = int(floor(cos(phi)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_front.append(1)
else:
ret_front.append(0)
#See if we have wall 1,2,3,4,5 sm right of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_right.append(1)
else:
ret_right.append(0)
#See if we have wall 1,2,3,4,5 sm left of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_left.append(1)
else:
ret_left.append(0)
return (ret_front, ret_right, ret_left)
|
<commit_before><commit_msg>Read map and detect walls based on distance<commit_after>from math import floor, cos, sin
map_file = open("map.txt", 'r')
map_array = [[0 for x in range(800)] for x in range(533)]
linenum = 0
for line in map_file:
processed_line = line.split()
processed_line = [int(x) for x in processed_line]
map_array[linenum] = processed_line
linenum = linenum + 1
def getNearbyWalls(x,y,phi):
#map - 5 px = 1sm
#robot diameter 5.5 sm., so we need 2.75*5 = 14 px radius
ret_front = [] # [front1sm,front2sm,front3sm,front4sm,front5sm]
ret_left = [] # [left1sm,left2sm,left3sm,left4sm,left5sm]
ret_right = [] # [right1sm,right2sm,right3sm,right4sm,right5sm]
#See if we have wall 1,2,3,4,5 sm in front of us
for i in range(1,6):
katety = int(floor(cos(phi)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_front.append(1)
else:
ret_front.append(0)
#See if we have wall 1,2,3,4,5 sm right of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_right.append(1)
else:
ret_right.append(0)
#See if we have wall 1,2,3,4,5 sm left of us
for i in range(1,6):
katety = int(floor(cos(phi-90)/14 + i*5)) #14 pixels robot size + 55 pixels ahead of it
katetx = int(floor(sin(phi-90)/14 + i*5))
if (map_array[katetx][katety] == 1):
ret_left.append(1)
else:
ret_left.append(0)
return (ret_front, ret_right, ret_left)
|
|
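getNearbyWalls divides by the 14 px robot radius instead of scaling by it, never uses the x and y arguments, and applies the same phi-90 offset for both the left and right scans. One plausible reading of the intent (step outward from the robot centre along a heading and test the map cell) is sketched below; the constants, the radians assumption, and the row/column orientation are guesses, not the author's confirmed design.

from math import floor, cos, sin

ROBOT_RADIUS_PX = 14   # 2.75 sm * 5 px per sm, rounded
PX_PER_SM = 5

def wall_at(map_array, x, y, phi, dist_sm):
    """True if the cell dist_sm beyond the robot edge along heading phi (radians) is a wall."""
    reach = ROBOT_RADIUS_PX + dist_sm * PX_PER_SM
    col = int(floor(x + cos(phi) * reach))   # columns run along x
    row = int(floor(y + sin(phi) * reach))   # rows run along y
    if 0 <= row < len(map_array) and 0 <= col < len(map_array[0]):
        return map_array[row][col] == 1
    return True   # treat anything off the map as a wall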
1a070f16c1649c1bc332101098b35ab7322c7827
|
problem_31.py
|
problem_31.py
|
from time import time
TARGET_SUM = 200
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
DYNAMIC_TABLE = {}
def calculate(point, coinset):
if point - coinset[0] < 0:
return 0
elif point == coinset[0]:
return 1
else:
if (point, str(coinset)) in DYNAMIC_TABLE:
return DYNAMIC_TABLE[(point, str(coinset))]
DYNAMIC_TABLE[(point, str(coinset))] = calculate(point-coinset[0], coinset) + calculate(point, coinset[1:])
return DYNAMIC_TABLE[(point, str(coinset))]
t = time()
print calculate(TARGET_SUM, COINS)
print 'Time:', time()-t
|
Add problem 31, coin dynamic programming
|
Add problem 31, coin dynamic programming
|
Python
|
mit
|
dimkarakostas/project-euler
|
Add problem 31, coin dynamic programming
|
from time import time
TARGET_SUM = 200
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
DYNAMIC_TABLE = {}
def calculate(point, coinset):
if point - coinset[0] < 0:
return 0
elif point == coinset[0]:
return 1
else:
if (point, str(coinset)) in DYNAMIC_TABLE:
return DYNAMIC_TABLE[(point, str(coinset))]
DYNAMIC_TABLE[(point, str(coinset))] = calculate(point-coinset[0], coinset) + calculate(point, coinset[1:])
return DYNAMIC_TABLE[(point, str(coinset))]
t = time()
print calculate(TARGET_SUM, COINS)
print 'Time:', time()-t
|
<commit_before><commit_msg>Add problem 31, coin dynamic programming<commit_after>
|
from time import time
TARGET_SUM = 200
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
DYNAMIC_TABLE = {}
def calculate(point, coinset):
if point - coinset[0] < 0:
return 0
elif point == coinset[0]:
return 1
else:
if (point, str(coinset)) in DYNAMIC_TABLE:
return DYNAMIC_TABLE[(point, str(coinset))]
DYNAMIC_TABLE[(point, str(coinset))] = calculate(point-coinset[0], coinset) + calculate(point, coinset[1:])
return DYNAMIC_TABLE[(point, str(coinset))]
t = time()
print calculate(TARGET_SUM, COINS)
print 'Time:', time()-t
|
Add problem 31, coin dynamic programmingfrom time import time
TARGET_SUM = 200
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
DYNAMIC_TABLE = {}
def calculate(point, coinset):
if point - coinset[0] < 0:
return 0
elif point == coinset[0]:
return 1
else:
if (point, str(coinset)) in DYNAMIC_TABLE:
return DYNAMIC_TABLE[(point, str(coinset))]
DYNAMIC_TABLE[(point, str(coinset))] = calculate(point-coinset[0], coinset) + calculate(point, coinset[1:])
return DYNAMIC_TABLE[(point, str(coinset))]
t = time()
print calculate(TARGET_SUM, COINS)
print 'Time:', time()-t
|
<commit_before><commit_msg>Add problem 31, coin dynamic programming<commit_after>from time import time
TARGET_SUM = 200
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
DYNAMIC_TABLE = {}
def calculate(point, coinset):
if point - coinset[0] < 0:
return 0
elif point == coinset[0]:
return 1
else:
if (point, str(coinset)) in DYNAMIC_TABLE:
return DYNAMIC_TABLE[(point, str(coinset))]
DYNAMIC_TABLE[(point, str(coinset))] = calculate(point-coinset[0], coinset) + calculate(point, coinset[1:])
return DYNAMIC_TABLE[(point, str(coinset))]
t = time()
print calculate(TARGET_SUM, COINS)
print 'Time:', time()-t
|
|
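The memoised recursion keys its cache on (point, str(coinset)). The same count, 73682 ways to make 200p from the eight UK coins, can be computed bottom-up with a one-dimensional table, avoiding recursion depth and string keys. A quick sketch:

def count_ways(target, coins):
    ways = [1] + [0] * target              # one way to make zero: use no coins
    for coin in coins:
        for amount in range(coin, target + 1):
            ways[amount] += ways[amount - coin]
    return ways[target]

# count_ways(200, [1, 2, 5, 10, 20, 50, 100, 200]) -> 73682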
73c9180d39dd4f8204c4f8df4a825776b0dcb9e5
|
radix_sort.py
|
radix_sort.py
|
def int_radix(mylist):
'''
Sorts a list of ints by looking at the 1's place,
the 10's, 100's, etc.
'''
for i in mylist:
if (type(i) != int) or (i < 0):
return 'Please input a valid list'
if len(mylist) > 0:
max_list = max(mylist)
tens = 1
num_buckets = []
for i in range(10):
num_buckets.append([])
while max_list >= tens:
for item in mylist:
bucket = (item % (tens * 10) / tens)
num_buckets[bucket].append(item)
buff = []
for bucket in num_buckets:
while len(bucket) > 0:
buff.append(bucket.pop(0))
mylist = buff
tens *= 10
return mylist
|
Add sort method for ints
|
Add sort method for ints
|
Python
|
mit
|
nbeck90/data_structures_2
|
Add sort method for ints
|
def int_radix(mylist):
'''
Sorts a list of ints by looking at the 1's place,
the 10's, 100's, etc.
'''
for i in mylist:
if (type(i) != int) or (i < 0):
return 'Please input a valid list'
if len(mylist) > 0:
max_list = max(mylist)
tens = 1
num_buckets = []
for i in range(10):
num_buckets.append([])
while max_list >= tens:
for item in mylist:
bucket = (item % (tens * 10) / tens)
num_buckets[bucket].append(item)
buff = []
for bucket in num_buckets:
while len(bucket) > 0:
buff.append(bucket.pop(0))
mylist = buff
tens *= 10
return mylist
|
<commit_before><commit_msg>Add sort method for ints<commit_after>
|
def int_radix(mylist):
'''
Sorts a list of ints by looking at the 1's place,
the 10's, 100's, etc.
'''
for i in mylist:
if (type(i) != int) or (i < 0):
return 'Please input a valid list'
if len(mylist) > 0:
max_list = max(mylist)
tens = 1
num_buckets = []
for i in range(10):
num_buckets.append([])
while max_list >= tens:
for item in mylist:
bucket = (item % (tens * 10) / tens)
num_buckets[bucket].append(item)
buff = []
for bucket in num_buckets:
while len(bucket) > 0:
buff.append(bucket.pop(0))
mylist = buff
tens *= 10
return mylist
|
Add sort method for intsdef int_radix(mylist):
'''
Sorts a list of ints by looking at the 1's place,
the 10's, 100's, etc.
'''
for i in mylist:
if (type(i) != int) or (i < 0):
return 'Please input a valid list'
if len(mylist) > 0:
max_list = max(mylist)
tens = 1
num_buckets = []
for i in range(10):
num_buckets.append([])
while max_list >= tens:
for item in mylist:
bucket = (item % (tens * 10) / tens)
num_buckets[bucket].append(item)
buff = []
for bucket in num_buckets:
while len(bucket) > 0:
buff.append(bucket.pop(0))
mylist = buff
tens *= 10
return mylist
|
<commit_before><commit_msg>Add sort method for ints<commit_after>def int_radix(mylist):
'''
Sorts a list of ints by looking at the 1's place,
the 10's, 100's, etc.
'''
for i in mylist:
if (type(i) != int) or (i < 0):
return 'Please input a valid list'
if len(mylist) > 0:
max_list = max(mylist)
tens = 1
num_buckets = []
for i in range(10):
num_buckets.append([])
while max_list >= tens:
for item in mylist:
bucket = (item % (tens * 10) / tens)
num_buckets[bucket].append(item)
buff = []
for bucket in num_buckets:
while len(bucket) > 0:
buff.append(bucket.pop(0))
mylist = buff
tens *= 10
return mylist
|
|
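A quick check of int_radix, assuming Python 2 semantics (the bucket index relies on integer division of ints; under Python 3 the expression item % (tens * 10) / tens would need //):

from radix_sort import int_radix

print int_radix([170, 45, 75, 90, 802, 24, 2, 66])   # [2, 24, 45, 66, 75, 90, 170, 802]
print int_radix([3, -1, 7])                          # 'Please input a valid list'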
a252cec4cf977d5c27509e9b282c5901ad89a522
|
tensorflow_io/hadoop/python/ops/hadoop_dataset_ops.py
|
tensorflow_io/hadoop/python/ops/hadoop_dataset_ops.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.hadoop.python.ops import hadoop_op_loader # pylint: disable=unused-import
from tensorflow.contrib.hadoop.python.ops import gen_dataset_ops
from tensorflow.python.data.ops.dataset_ops import Dataset
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
class SequenceFileDataset(Dataset):
"""A Sequence File Dataset that reads the sequence file."""
def __init__(self, filenames, output_types=(dtypes.string, dtypes.string)):
"""Create a `SequenceFileDataset`.
`SequenceFileDataset` allows a user to read data from a hadoop sequence
file. A sequence file consists of (key value) pairs sequentially. At
the moment, `org.apache.hadoop.io.Text` is the only serialization type
being supported, and there is no compression support.
For example:
```python
dataset = tf.contrib.hadoop.SequenceFileDataset(
"/foo/bar.seq", (tf.string, tf.string))
iterator = dataset.make_one_shot_iterator()
next_element = iterator.get_next()
# Prints the (key, value) pairs inside a hadoop sequence file.
while True:
try:
print(sess.run(next_element))
except tf.errors.OutOfRangeError:
break
```
Args:
filenames: A `tf.string` tensor containing one or more filenames.
output_types: A tuple of `tf.DType` objects representing the types of the
key-value pairs returned. Only `(tf.string, tf.string)` is supported
at the moment.
"""
super(SequenceFileDataset, self).__init__()
self._filenames = ops.convert_to_tensor(
filenames, dtype=dtypes.string, name="filenames")
self._output_types = output_types
def _as_variant_tensor(self):
return gen_dataset_ops.sequence_file_dataset(
self._filenames, nest.flatten(self.output_types))
@property
def output_classes(self):
return nest.map_structure(lambda _: ops.Tensor, self._output_types)
@property
def output_shapes(self):
return (tensor_shape.TensorShape([]), tensor_shape.TensorShape([]))
@property
def output_types(self):
return self._output_types
|
Add python wrapper for tf.contrib.hadoop.SequenceFileDataset
|
Add python wrapper for tf.contrib.hadoop.SequenceFileDataset
Signed-off-by: Yong Tang <765086fe2e0c1f980161f127fec596800f327f62@outlook.com>
|
Python
|
apache-2.0
|
tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io
|
Add python wrapper for tf.contrib.hadoop.SequenceFileDataset
Signed-off-by: Yong Tang <765086fe2e0c1f980161f127fec596800f327f62@outlook.com>
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.hadoop.python.ops import hadoop_op_loader # pylint: disable=unused-import
from tensorflow.contrib.hadoop.python.ops import gen_dataset_ops
from tensorflow.python.data.ops.dataset_ops import Dataset
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
class SequenceFileDataset(Dataset):
"""A Sequence File Dataset that reads the sequence file."""
def __init__(self, filenames, output_types=(dtypes.string, dtypes.string)):
"""Create a `SequenceFileDataset`.
`SequenceFileDataset` allows a user to read data from a hadoop sequence
file. A sequence file consists of (key value) pairs sequentially. At
the moment, `org.apache.hadoop.io.Text` is the only serialization type
being supported, and there is no compression support.
For example:
```python
dataset = tf.contrib.hadoop.SequenceFileDataset(
"/foo/bar.seq", (tf.string, tf.string))
iterator = dataset.make_one_shot_iterator()
next_element = iterator.get_next()
# Prints the (key, value) pairs inside a hadoop sequence file.
while True:
try:
print(sess.run(next_element))
except tf.errors.OutOfRangeError:
break
```
Args:
filenames: A `tf.string` tensor containing one or more filenames.
output_types: A tuple of `tf.DType` objects representing the types of the
key-value pairs returned. Only `(tf.string, tf.string)` is supported
at the moment.
"""
super(SequenceFileDataset, self).__init__()
self._filenames = ops.convert_to_tensor(
filenames, dtype=dtypes.string, name="filenames")
self._output_types = output_types
def _as_variant_tensor(self):
return gen_dataset_ops.sequence_file_dataset(
self._filenames, nest.flatten(self.output_types))
@property
def output_classes(self):
return nest.map_structure(lambda _: ops.Tensor, self._output_types)
@property
def output_shapes(self):
return (tensor_shape.TensorShape([]), tensor_shape.TensorShape([]))
@property
def output_types(self):
return self._output_types
|
<commit_before><commit_msg>Add python wrapper for tf.contrib.hadoop.SequenceFileDataset
Signed-off-by: Yong Tang <765086fe2e0c1f980161f127fec596800f327f62@outlook.com><commit_after>
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.hadoop.python.ops import hadoop_op_loader # pylint: disable=unused-import
from tensorflow.contrib.hadoop.python.ops import gen_dataset_ops
from tensorflow.python.data.ops.dataset_ops import Dataset
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
class SequenceFileDataset(Dataset):
"""A Sequence File Dataset that reads the sequence file."""
def __init__(self, filenames, output_types=(dtypes.string, dtypes.string)):
"""Create a `SequenceFileDataset`.
`SequenceFileDataset` allows a user to read data from a hadoop sequence
file. A sequence file consists of (key value) pairs sequentially. At
the moment, `org.apache.hadoop.io.Text` is the only serialization type
being supported, and there is no compression support.
For example:
```python
dataset = tf.contrib.hadoop.SequenceFileDataset(
"/foo/bar.seq", (tf.string, tf.string))
iterator = dataset.make_one_shot_iterator()
next_element = iterator.get_next()
# Prints the (key, value) pairs inside a hadoop sequence file.
while True:
try:
print(sess.run(next_element))
except tf.errors.OutOfRangeError:
break
```
Args:
filenames: A `tf.string` tensor containing one or more filenames.
output_types: A tuple of `tf.DType` objects representing the types of the
key-value pairs returned. Only `(tf.string, tf.string)` is supported
at the moment.
"""
super(SequenceFileDataset, self).__init__()
self._filenames = ops.convert_to_tensor(
filenames, dtype=dtypes.string, name="filenames")
self._output_types = output_types
def _as_variant_tensor(self):
return gen_dataset_ops.sequence_file_dataset(
self._filenames, nest.flatten(self.output_types))
@property
def output_classes(self):
return nest.map_structure(lambda _: ops.Tensor, self._output_types)
@property
def output_shapes(self):
return (tensor_shape.TensorShape([]), tensor_shape.TensorShape([]))
@property
def output_types(self):
return self._output_types
|
Add python wrapper for tf.contrib.hadoop.SequenceFileDataset
Signed-off-by: Yong Tang <765086fe2e0c1f980161f127fec596800f327f62@outlook.com># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.hadoop.python.ops import hadoop_op_loader # pylint: disable=unused-import
from tensorflow.contrib.hadoop.python.ops import gen_dataset_ops
from tensorflow.python.data.ops.dataset_ops import Dataset
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
class SequenceFileDataset(Dataset):
"""A Sequence File Dataset that reads the sequence file."""
def __init__(self, filenames, output_types=(dtypes.string, dtypes.string)):
"""Create a `SequenceFileDataset`.
`SequenceFileDataset` allows a user to read data from a hadoop sequence
file. A sequence file consists of (key value) pairs sequentially. At
the moment, `org.apache.hadoop.io.Text` is the only serialization type
being supported, and there is no compression support.
For example:
```python
dataset = tf.contrib.hadoop.SequenceFileDataset(
"/foo/bar.seq", (tf.string, tf.string))
iterator = dataset.make_one_shot_iterator()
next_element = iterator.get_next()
# Prints the (key, value) pairs inside a hadoop sequence file.
while True:
try:
print(sess.run(next_element))
except tf.errors.OutOfRangeError:
break
```
Args:
filenames: A `tf.string` tensor containing one or more filenames.
output_types: A tuple of `tf.DType` objects representing the types of the
key-value pairs returned. Only `(tf.string, tf.string)` is supported
at the moment.
"""
super(SequenceFileDataset, self).__init__()
self._filenames = ops.convert_to_tensor(
filenames, dtype=dtypes.string, name="filenames")
self._output_types = output_types
def _as_variant_tensor(self):
return gen_dataset_ops.sequence_file_dataset(
self._filenames, nest.flatten(self.output_types))
@property
def output_classes(self):
return nest.map_structure(lambda _: ops.Tensor, self._output_types)
@property
def output_shapes(self):
return (tensor_shape.TensorShape([]), tensor_shape.TensorShape([]))
@property
def output_types(self):
return self._output_types
|
<commit_before><commit_msg>Add python wrapper for tf.contrib.hadoop.SequenceFileDataset
Signed-off-by: Yong Tang <765086fe2e0c1f980161f127fec596800f327f62@outlook.com><commit_after># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SequenceFile Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.hadoop.python.ops import hadoop_op_loader # pylint: disable=unused-import
from tensorflow.contrib.hadoop.python.ops import gen_dataset_ops
from tensorflow.python.data.ops.dataset_ops import Dataset
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
class SequenceFileDataset(Dataset):
"""A Sequence File Dataset that reads the sequence file."""
def __init__(self, filenames, output_types=(dtypes.string, dtypes.string)):
"""Create a `SequenceFileDataset`.
`SequenceFileDataset` allows a user to read data from a hadoop sequence
file. A sequence file consists of (key value) pairs sequentially. At
the moment, `org.apache.hadoop.io.Text` is the only serialization type
being supported, and there is no compression support.
For example:
```python
dataset = tf.contrib.hadoop.SequenceFileDataset(
"/foo/bar.seq", (tf.string, tf.string))
iterator = dataset.make_one_shot_iterator()
next_element = iterator.get_next()
# Prints the (key, value) pairs inside a hadoop sequence file.
while True:
try:
print(sess.run(next_element))
except tf.errors.OutOfRangeError:
break
```
Args:
filenames: A `tf.string` tensor containing one or more filenames.
output_types: A tuple of `tf.DType` objects representing the types of the
key-value pairs returned. Only `(tf.string, tf.string)` is supported
at the moment.
"""
super(SequenceFileDataset, self).__init__()
self._filenames = ops.convert_to_tensor(
filenames, dtype=dtypes.string, name="filenames")
self._output_types = output_types
def _as_variant_tensor(self):
return gen_dataset_ops.sequence_file_dataset(
self._filenames, nest.flatten(self.output_types))
@property
def output_classes(self):
return nest.map_structure(lambda _: ops.Tensor, self._output_types)
@property
def output_shapes(self):
return (tensor_shape.TensorShape([]), tensor_shape.TensorShape([]))
@property
def output_types(self):
return self._output_types
|
|
e14a53e0a3942ab021977301a5e08a016a2218cf
|
adminrestrict/signals.py
|
adminrestrict/signals.py
|
"""
adminrestrict signals
"""
__author__ = "Robert Romano"
__copyright__ = "Copyright 2020 Robert C. Romano"
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from adminrestrict.models import AllowedIP
from adminrestrict.middleware import AdminPagesRestrictMiddleware
@receiver(post_save, sender=AllowedIP)
def allowed_ip_saved(sender, instance, created, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
@receiver(post_delete, sender=AllowedIP)
def allowed_ip_deleted(sender, instance, using, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
|
Add signal receivers for post-save and delete
|
Add signal receivers for post-save and delete
|
Python
|
mit
|
robromano/django-adminrestrict
|
Add signal receivers for post-save and delete
|
"""
adminrestrict signals
"""
__author__ = "Robert Romano"
__copyright__ = "Copyright 2020 Robert C. Romano"
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from adminrestrict.models import AllowedIP
from adminrestrict.middleware import AdminPagesRestrictMiddleware
@receiver(post_save, sender=AllowedIP)
def allowed_ip_saved(sender, instance, created, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
@receiver(post_delete, sender=AllowedIP)
def allowed_ip_deleted(sender, instance, using, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
|
<commit_before><commit_msg>Add signal receivers for post-save and delete<commit_after>
|
"""
adminrestrict signals
"""
__author__ = "Robert Romano"
__copyright__ = "Copyright 2020 Robert C. Romano"
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from adminrestrict.models import AllowedIP
from adminrestrict.middleware import AdminPagesRestrictMiddleware
@receiver(post_save, sender=AllowedIP)
def allowed_ip_saved(sender, instance, created, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
@receiver(post_delete, sender=AllowedIP)
def allowed_ip_deleted(sender, instance, using, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
|
Add signal receivers for post-save and delete"""
adminrestrict signals
"""
__author__ = "Robert Romano"
__copyright__ = "Copyright 2020 Robert C. Romano"
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from adminrestrict.models import AllowedIP
from adminrestrict.middleware import AdminPagesRestrictMiddleware
@receiver(post_save, sender=AllowedIP)
def allowed_ip_saved(sender, instance, created, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
@receiver(post_delete, sender=AllowedIP)
def allowed_ip_deleted(sender, instance, using, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
|
<commit_before><commit_msg>Add signal receivers for post-save and delete<commit_after>"""
adminrestrict signals
"""
__author__ = "Robert Romano"
__copyright__ = "Copyright 2020 Robert C. Romano"
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from adminrestrict.models import AllowedIP
from adminrestrict.middleware import AdminPagesRestrictMiddleware
@receiver(post_save, sender=AllowedIP)
def allowed_ip_saved(sender, instance, created, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
@receiver(post_delete, sender=AllowedIP)
def allowed_ip_deleted(sender, instance, using, **kwargs):
AdminPagesRestrictMiddleware._invalidate_cache = True
|
|
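The receivers only flip a class attribute; the middleware that reads it is not part of this record. Purely as an illustration of how such a flag could be consumed, a sketch is below; the cache layout, method name, and the ip_address field are assumptions, not django-adminrestrict's actual API.

from adminrestrict.models import AllowedIP

class AdminPagesRestrictMiddleware(object):
    _invalidate_cache = False
    _allowed_ips = None            # class-level cache shared across requests

    @classmethod
    def _load_allowed_ips(cls):
        if cls._allowed_ips is None or cls._invalidate_cache:
            # re-read the table only after a save/delete signal marked it dirty
            cls._allowed_ips = set(
                AllowedIP.objects.values_list("ip_address", flat=True))  # field name is a guess
            cls._invalidate_cache = False
        return cls._allowed_ips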
22156f488fba737a230eae876817afb497347c4c
|
bamova/predict_loci.py
|
bamova/predict_loci.py
|
import sys
import numpy as np
def predict_loci(phi_values, cutoff_percent):
average_phi_values = np.mean(phi_values, axis=1)
sortable = []
for loci, phi in enumerate(average_phi_values):
sortable.append((phi, loci))
sortable.sort()
sortable.reverse()
cutoff_idx = int(len(sortable) * cutoff_percent)
return sortable[:cutoff_idx]
def write_predicted(flname, predicted_loci):
fl = open(flname, "w")
for phi, loci in predicted_loci:
fl.write("%s %s\n" % (loci, phi))
fl.close()
if __name__ == "__main__":
npy_flname = sys.argv[1]
cutoff_percent = float(sys.argv[2])
predicted_flname = sys.argv[3]
phi_values = np.load(npy_flname)
predicted_loci = predict_loci(phi_values, cutoff_percent)
write_predicted(predicted_flname, predicted_loci)
|
Add script for predicing loci from bamova
|
Add script for predicing loci from bamova
|
Python
|
apache-2.0
|
rnowling/pop-gen-models
|
Add script for predicing loci from bamova
|
import sys
import numpy as np
def predict_loci(phi_values, cutoff_percent):
average_phi_values = np.mean(phi_values, axis=1)
sortable = []
for loci, phi in enumerate(average_phi_values):
sortable.append((phi, loci))
sortable.sort()
sortable.reverse()
cutoff_idx = int(len(sortable) * cutoff_percent)
return sortable[:cutoff_idx]
def write_predicted(flname, predicted_loci):
fl = open(flname, "w")
for phi, loci in predicted_loci:
fl.write("%s %s\n" % (loci, phi))
fl.close()
if __name__ == "__main__":
npy_flname = sys.argv[1]
cutoff_percent = float(sys.argv[2])
predicted_flname = sys.argv[3]
phi_values = np.load(npy_flname)
predicted_loci = predict_loci(phi_values, cutoff_percent)
write_predicted(predicted_flname, predicted_loci)
|
<commit_before><commit_msg>Add script for predicing loci from bamova<commit_after>
|
import sys
import numpy as np
def predict_loci(phi_values, cutoff_percent):
average_phi_values = np.mean(phi_values, axis=1)
sortable = []
for loci, phi in enumerate(average_phi_values):
sortable.append((phi, loci))
sortable.sort()
sortable.reverse()
cutoff_idx = int(len(sortable) * cutoff_percent)
return sortable[:cutoff_idx]
def write_predicted(flname, predicted_loci):
fl = open(flname, "w")
for phi, loci in predicted_loci:
fl.write("%s %s\n" % (loci, phi))
fl.close()
if __name__ == "__main__":
npy_flname = sys.argv[1]
cutoff_percent = float(sys.argv[2])
predicted_flname = sys.argv[3]
phi_values = np.load(npy_flname)
predicted_loci = predict_loci(phi_values, cutoff_percent)
write_predicted(predicted_flname, predicted_loci)
|
Add script for predicing loci from bamovaimport sys
import numpy as np
def predict_loci(phi_values, cutoff_percent):
average_phi_values = np.mean(phi_values, axis=1)
sortable = []
for loci, phi in enumerate(average_phi_values):
sortable.append((phi, loci))
sortable.sort()
sortable.reverse()
cutoff_idx = int(len(sortable) * cutoff_percent)
return sortable[:cutoff_idx]
def write_predicted(flname, predicted_loci):
fl = open(flname, "w")
for phi, loci in predicted_loci:
fl.write("%s %s\n" % (loci, phi))
fl.close()
if __name__ == "__main__":
npy_flname = sys.argv[1]
cutoff_percent = float(sys.argv[2])
predicted_flname = sys.argv[3]
phi_values = np.load(npy_flname)
predicted_loci = predict_loci(phi_values, cutoff_percent)
write_predicted(predicted_flname, predicted_loci)
|
<commit_before><commit_msg>Add script for predicing loci from bamova<commit_after>import sys
import numpy as np
def predict_loci(phi_values, cutoff_percent):
average_phi_values = np.mean(phi_values, axis=1)
sortable = []
for loci, phi in enumerate(average_phi_values):
sortable.append((phi, loci))
sortable.sort()
sortable.reverse()
cutoff_idx = int(len(sortable) * cutoff_percent)
return sortable[:cutoff_idx]
def write_predicted(flname, predicted_loci):
fl = open(flname, "w")
for phi, loci in predicted_loci:
fl.write("%s %s\n" % (loci, phi))
fl.close()
if __name__ == "__main__":
npy_flname = sys.argv[1]
cutoff_percent = float(sys.argv[2])
predicted_flname = sys.argv[3]
phi_values = np.load(npy_flname)
predicted_loci = predict_loci(phi_values, cutoff_percent)
write_predicted(predicted_flname, predicted_loci)
|
|
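The script loads a .npy matrix of per-locus phi values, keeps the top fraction of loci by mean phi, and writes them to a text file. Driving it directly from Python looks like this; the file names and cutoff are placeholders only.

import numpy as np
from predict_loci import predict_loci, write_predicted

phi = np.load("phi_values.npy")         # per-locus, per-population phi estimates (placeholder name)
top_loci = predict_loci(phi, 0.05)      # keep the top 5% of loci by mean phi
write_predicted("predicted_loci.txt", top_loci)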
2d421abd4987ec9d0d4c5e55805aab2d7fc84359
|
packages/adminrouter/extra/src/test-harness/tests/test_metrics.py
|
packages/adminrouter/extra/src/test-harness/tests/test_metrics.py
|
import requests
import urllib.parse
class TestMetrics:
def test_metrics_html(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/status returns metrics in HTML format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/status')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/html'
def test_metrics_prometheus(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics returns metrics in Prometheus format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
assert resp.text.startswith('# HELP nginx_vts_info Nginx info')
def test_metrics_prometheus_escape(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics escapes Prometheus format correctly.
"""
# https://github.com/prometheus/docs/blob/master/content/docs/instrumenting/exposition_formats.md#text-format-details
# "label_value can be any sequence of UTF-8 characters, but the backslash
# (\), double-quote ("), and line feed (\n) characters have to be escaped
# as \\, \", and \n, respectively."
# Add \t for tab as well, to show that it passes through unescaped
url_path = urllib.parse.quote('/service/monitoring/gra"f\\a\nn\ta')
url = master_ar_process_pertest.make_url_from_path(url_path)
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
# DCOS-50265 swaps the truth of the following two asserts:
# not escaped:
assert '/service/monitoring/gra"f\\a\nn\ta' in resp.text
# correctly escaped:
assert '/service/monitoring/gra\\"f\\\\a\\nn\ta' not in resp.text
|
Add some tests for metrics
|
Add some tests for metrics
|
Python
|
apache-2.0
|
dcos/dcos,mesosphere-mergebot/dcos,mesosphere-mergebot/dcos,mesosphere-mergebot/mergebot-test-dcos,mesosphere-mergebot/dcos,mesosphere-mergebot/dcos,dcos/dcos,GoelDeepak/dcos,mesosphere-mergebot/mergebot-test-dcos,dcos/dcos,kensipe/dcos,kensipe/dcos,kensipe/dcos,GoelDeepak/dcos,GoelDeepak/dcos,dcos/dcos,dcos/dcos,kensipe/dcos,mesosphere-mergebot/mergebot-test-dcos,GoelDeepak/dcos,mesosphere-mergebot/mergebot-test-dcos
|
Add some tests for metrics
|
import requests
import urllib.parse
class TestMetrics:
def test_metrics_html(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/status returns metrics in HTML format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/status')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/html'
def test_metrics_prometheus(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics returns metrics in Prometheus format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
assert resp.text.startswith('# HELP nginx_vts_info Nginx info')
def test_metrics_prometheus_escape(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics escapes Prometheus format correctly.
"""
# https://github.com/prometheus/docs/blob/master/content/docs/instrumenting/exposition_formats.md#text-format-details
# "label_value can be any sequence of UTF-8 characters, but the backslash
# (\), double-quote ("), and line feed (\n) characters have to be escaped
# as \\, \", and \n, respectively."
# Add \t for tab as well, to show that it passes through unescaped
url_path = urllib.parse.quote('/service/monitoring/gra"f\\a\nn\ta')
url = master_ar_process_pertest.make_url_from_path(url_path)
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
# DCOS-50265 swaps the truth of the following two asserts:
# not escaped:
assert '/service/monitoring/gra"f\\a\nn\ta' in resp.text
# correctly escaped:
assert '/service/monitoring/gra\\"f\\\\a\\nn\ta' not in resp.text
|
<commit_before><commit_msg>Add some tests for metrics<commit_after>
|
import requests
import urllib.parse
class TestMetrics:
def test_metrics_html(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/status returns metrics in HTML format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/status')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/html'
def test_metrics_prometheus(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics returns metrics in Prometheus format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
assert resp.text.startswith('# HELP nginx_vts_info Nginx info')
def test_metrics_prometheus_escape(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics escapes Prometheus format correctly.
"""
# https://github.com/prometheus/docs/blob/master/content/docs/instrumenting/exposition_formats.md#text-format-details
# "label_value can be any sequence of UTF-8 characters, but the backslash
# (\), double-quote ("), and line feed (\n) characters have to be escaped
# as \\, \", and \n, respectively."
# Add \t for tab as well, to show that it passes through unescaped
url_path = urllib.parse.quote('/service/monitoring/gra"f\\a\nn\ta')
url = master_ar_process_pertest.make_url_from_path(url_path)
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
# DCOS-50265 swaps the truth of the following two asserts:
# not escaped:
assert '/service/monitoring/gra"f\\a\nn\ta' in resp.text
# correctly escaped:
assert '/service/monitoring/gra\\"f\\\\a\\nn\ta' not in resp.text
|
Add some tests for metricsimport requests
import urllib.parse
class TestMetrics:
def test_metrics_html(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/status returns metrics in HTML format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/status')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/html'
def test_metrics_prometheus(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics returns metrics in Prometheus format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
assert resp.text.startswith('# HELP nginx_vts_info Nginx info')
def test_metrics_prometheus_escape(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics escapes Prometheus format correctly.
"""
# https://github.com/prometheus/docs/blob/master/content/docs/instrumenting/exposition_formats.md#text-format-details
# "label_value can be any sequence of UTF-8 characters, but the backslash
# (\), double-quote ("), and line feed (\n) characters have to be escaped
# as \\, \", and \n, respectively."
# Add \t for tab as well, to show that it passes through unescaped
url_path = urllib.parse.quote('/service/monitoring/gra"f\\a\nn\ta')
url = master_ar_process_pertest.make_url_from_path(url_path)
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
# DCOS-50265 swaps the truth of the following two asserts:
# not escaped:
assert '/service/monitoring/gra"f\\a\nn\ta' in resp.text
# correctly escaped:
assert '/service/monitoring/gra\\"f\\\\a\\nn\ta' not in resp.text
|
<commit_before><commit_msg>Add some tests for metrics<commit_after>import requests
import urllib.parse
class TestMetrics:
def test_metrics_html(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/status returns metrics in HTML format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/status')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/html'
def test_metrics_prometheus(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics returns metrics in Prometheus format
"""
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
assert resp.text.startswith('# HELP nginx_vts_info Nginx info')
def test_metrics_prometheus_escape(self, master_ar_process_pertest, valid_user_header):
"""
/nginx/metrics escapes Prometheus format correctly.
"""
# https://github.com/prometheus/docs/blob/master/content/docs/instrumenting/exposition_formats.md#text-format-details
# "label_value can be any sequence of UTF-8 characters, but the backslash
# (\), double-quote ("), and line feed (\n) characters have to be escaped
# as \\, \", and \n, respectively."
# Add \t for tab as well, to show that it passes through unescaped
url_path = urllib.parse.quote('/service/monitoring/gra"f\\a\nn\ta')
url = master_ar_process_pertest.make_url_from_path(url_path)
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
url = master_ar_process_pertest.make_url_from_path('/nginx/metrics')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert resp.headers['Content-Type'] == 'text/plain'
# DCOS-50265 swaps the truth of the following two asserts:
# not escaped:
assert '/service/monitoring/gra"f\\a\nn\ta' in resp.text
# correctly escaped:
assert '/service/monitoring/gra\\"f\\\\a\\nn\ta' not in resp.text
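To make the escaping rule that the last test exercises concrete, here is a small illustrative helper (not part of the Admin Router code, just a sketch of the documented Prometheus text-format rule):

def escape_label_value(value: str) -> str:
    # Backslash, double-quote and line feed must become \\, \" and \n;
    # other characters such as tab pass through unchanged.
    return value.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n")

raw = '/service/monitoring/gra"f\\a\nn\ta'
print(escape_label_value(raw))  # -> /service/monitoring/gra\"f\\a\nn<tab>a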
|
|
df43706b287dd7559bf5abc249cc83028f3ba6b1
|
paramsweep.py
|
paramsweep.py
|
#!/usr/bin/env python
"""
Run multiple simulations varying a single parameter.
"""
import numpy as np
from subprocess import call
import os
import pandas as pd
import argparse
def read_force_coeffs():
"""Read force coefficients from `postProcessing/actuatorLines`."""
df = pd.read_csv("postProcessing/actuatorLines/0/foil.csv")
df = df.iloc[-1]
return df[["time", "rel_vel_mag", "alpha_geom_deg", "alpha_deg", "cl",
"cd", "cm"]]
def read_turbulence_fields():
"""Dummy function for now."""
return {"k": np.nan, "omega": np.nan, "epsilon": np.nan, "nut": np.nan,
"z_turbulence": np.nan}
def alpha_sweep(start, stop, step, append=False):
"""Vary the foil angle of attack and log results."""
alpha_list = np.arange(start, stop, step)
df_fname = "processed/alpha_sweep.csv"
if append:
df = pd.read_csv(df_fname)
else:
df = pd.DataFrame(columns=["time", "rel_vel_mag", "alpha_geom_deg",
"alpha_deg", "cl", "cd", "cm", "k", "omega",
"epsilon", "nut", "z_turbulence"])
for alpha in alpha_list:
call("./Allclean")
call(["./Allrun", str(alpha)])
d = dict(read_force_coeffs())
d.update(read_turbulence_fields())
df = df.append(d, ignore_index=True)
df.to_csv(df_fname, index=False)
if __name__ == "__main__":
if not os.path.isdir("processed"):
os.mkdir("processed")
parser = argparse.ArgumentParser(description="Vary the foil angle of \
attack and log results.")
parser.add_argument("start", type=float, help="Start angle of sweep.",
nargs="?", default=-15.0)
parser.add_argument("stop", type=float, help="End angle of sweep. The sweep\
does not include this value.", nargs="?", default=15.0)
parser.add_argument("step", type=float, default=1.0, nargs="?",
help="Spacing between values.")
parser.add_argument("--append", "-a", action="store_true", default=False,
help="Append to previous results")
args = parser.parse_args()
alpha_sweep(args.start, args.stop, args.step, append=args.append)
|
Add script for running parameter sweep
|
Add script for running parameter sweep
|
Python
|
mit
|
petebachant/actuatorLine-2D-turbinesFoam,petebachant/actuatorLine-2D-turbinesFoam,petebachant/actuatorLine-2D-turbinesFoam
|
Add script for running parameter sweep
|
#!/usr/bin/env python
"""
Run multiple simulations varying a single parameter.
"""
import numpy as np
from subprocess import call
import os
import pandas as pd
import argparse
def read_force_coeffs():
"""Read force coefficients from `postProcessing/actuatorLines`."""
df = pd.read_csv("postProcessing/actuatorLines/0/foil.csv")
df = df.iloc[-1]
return df[["time", "rel_vel_mag", "alpha_geom_deg", "alpha_deg", "cl",
"cd", "cm"]]
def read_turbulence_fields():
"""Dummy function for now."""
return {"k": np.nan, "omega": np.nan, "epsilon": np.nan, "nut": np.nan,
"z_turbulence": np.nan}
def alpha_sweep(start, stop, step, append=False):
"""Vary the foil angle of attack and log results."""
alpha_list = np.arange(start, stop, step)
df_fname = "processed/alpha_sweep.csv"
if append:
df = pd.read_csv(df_fname)
else:
df = pd.DataFrame(columns=["time", "rel_vel_mag", "alpha_geom_deg",
"alpha_deg", "cl", "cd", "cm", "k", "omega",
"epsilon", "nut", "z_turbulence"])
for alpha in alpha_list:
call("./Allclean")
call(["./Allrun", str(alpha)])
d = dict(read_force_coeffs())
d.update(read_turbulence_fields())
df = df.append(d, ignore_index=True)
df.to_csv(df_fname, index=False)
if __name__ == "__main__":
if not os.path.isdir("processed"):
os.mkdir("processed")
parser = argparse.ArgumentParser(description="Vary the foil angle of \
attack and log results.")
parser.add_argument("start", type=float, help="Start angle of sweep.",
nargs="?", default=-15.0)
parser.add_argument("stop", type=float, help="End angle of sweep. The sweep\
does not include this value.", nargs="?", default=15.0)
parser.add_argument("step", type=float, default=1.0, nargs="?",
help="Spacing between values.")
parser.add_argument("--append", "-a", action="store_true", default=False,
help="Append to previous results")
args = parser.parse_args()
alpha_sweep(args.start, args.stop, args.step, append=args.append)
|
<commit_before><commit_msg>Add script for running parameter sweep<commit_after>
|
#!/usr/bin/env python
"""
Run multiple simulations varying a single parameter.
"""
import numpy as np
from subprocess import call
import os
import pandas as pd
import argparse
def read_force_coeffs():
"""Read force coefficients from `postProcessing/actuatorLines`."""
df = pd.read_csv("postProcessing/actuatorLines/0/foil.csv")
df = df.iloc[-1]
return df[["time", "rel_vel_mag", "alpha_geom_deg", "alpha_deg", "cl",
"cd", "cm"]]
def read_turbulence_fields():
"""Dummy function for now."""
return {"k": np.nan, "omega": np.nan, "epsilon": np.nan, "nut": np.nan,
"z_turbulence": np.nan}
def alpha_sweep(start, stop, step, append=False):
"""Vary the foil angle of attack and log results."""
alpha_list = np.arange(start, stop, step)
df_fname = "processed/alpha_sweep.csv"
if append:
df = pd.read_csv(df_fname)
else:
df = pd.DataFrame(columns=["time", "rel_vel_mag", "alpha_geom_deg",
"alpha_deg", "cl", "cd", "cm", "k", "omega",
"epsilon", "nut", "z_turbulence"])
for alpha in alpha_list:
call("./Allclean")
call(["./Allrun", str(alpha)])
d = dict(read_force_coeffs())
d.update(read_turbulence_fields())
df = df.append(d, ignore_index=True)
df.to_csv(df_fname, index=False)
if __name__ == "__main__":
if not os.path.isdir("processed"):
os.mkdir("processed")
parser = argparse.ArgumentParser(description="Vary the foil angle of \
attack and log results.")
parser.add_argument("start", type=float, help="Start angle of sweep.",
nargs="?", default=-15.0)
parser.add_argument("stop", type=float, help="End angle of sweep. The sweep\
does not include this value.", nargs="?", default=15.0)
parser.add_argument("step", type=float, default=1.0, nargs="?",
help="Spacing between values.")
parser.add_argument("--append", "-a", action="store_true", default=False,
help="Append to previous results")
args = parser.parse_args()
alpha_sweep(args.start, args.stop, args.step, append=args.append)
|
Add script for running parameter sweep#!/usr/bin/env python
"""
Run multiple simulations varying a single parameter.
"""
import numpy as np
from subprocess import call
import os
import pandas as pd
import argparse
def read_force_coeffs():
"""Read force coefficients from `postProcessing/actuatorLines`."""
df = pd.read_csv("postProcessing/actuatorLines/0/foil.csv")
df = df.iloc[-1]
return df[["time", "rel_vel_mag", "alpha_geom_deg", "alpha_deg", "cl",
"cd", "cm"]]
def read_turbulence_fields():
"""Dummy function for now."""
return {"k": np.nan, "omega": np.nan, "epsilon": np.nan, "nut": np.nan,
"z_turbulence": np.nan}
def alpha_sweep(start, stop, step, append=False):
"""Vary the foil angle of attack and log results."""
alpha_list = np.arange(start, stop, step)
df_fname = "processed/alpha_sweep.csv"
if append:
df = pd.read_csv(df_fname)
else:
df = pd.DataFrame(columns=["time", "rel_vel_mag", "alpha_geom_deg",
"alpha_deg", "cl", "cd", "cm", "k", "omega",
"epsilon", "nut", "z_turbulence"])
for alpha in alpha_list:
call("./Allclean")
call(["./Allrun", str(alpha)])
d = dict(read_force_coeffs())
d.update(read_turbulence_fields())
df = df.append(d, ignore_index=True)
df.to_csv(df_fname, index=False)
if __name__ == "__main__":
if not os.path.isdir("processed"):
os.mkdir("processed")
parser = argparse.ArgumentParser(description="Vary the foil angle of \
attack and log results.")
parser.add_argument("start", type=float, help="Start angle of sweep.",
nargs="?", default=-15.0)
parser.add_argument("stop", type=float, help="End angle of sweep. The sweep\
does not include this value.", nargs="?", default=15.0)
parser.add_argument("step", type=float, default=1.0, nargs="?",
help="Spacing between values.")
parser.add_argument("--append", "-a", action="store_true", default=False,
help="Append to previous results")
args = parser.parse_args()
alpha_sweep(args.start, args.stop, args.step, append=args.append)
|
<commit_before><commit_msg>Add script for running parameter sweep<commit_after>#!/usr/bin/env python
"""
Run multiple simulations varying a single parameter.
"""
import numpy as np
from subprocess import call
import os
import pandas as pd
import argparse
def read_force_coeffs():
"""Read force coefficients from `postProcessing/actuatorLines`."""
df = pd.read_csv("postProcessing/actuatorLines/0/foil.csv")
df = df.iloc[-1]
return df[["time", "rel_vel_mag", "alpha_geom_deg", "alpha_deg", "cl",
"cd", "cm"]]
def read_turbulence_fields():
"""Dummy function for now."""
return {"k": np.nan, "omega": np.nan, "epsilon": np.nan, "nut": np.nan,
"z_turbulence": np.nan}
def alpha_sweep(start, stop, step, append=False):
"""Vary the foil angle of attack and log results."""
alpha_list = np.arange(start, stop, step)
df_fname = "processed/alpha_sweep.csv"
if append:
df = pd.read_csv(df_fname)
else:
df = pd.DataFrame(columns=["time", "rel_vel_mag", "alpha_geom_deg",
"alpha_deg", "cl", "cd", "cm", "k", "omega",
"epsilon", "nut", "z_turbulence"])
for alpha in alpha_list:
call("./Allclean")
call(["./Allrun", str(alpha)])
d = dict(read_force_coeffs())
d.update(read_turbulence_fields())
df = df.append(d, ignore_index=True)
df.to_csv(df_fname, index=False)
if __name__ == "__main__":
if not os.path.isdir("processed"):
os.mkdir("processed")
parser = argparse.ArgumentParser(description="Vary the foil angle of \
attack and log results.")
parser.add_argument("start", type=float, help="Start angle of sweep.",
nargs="?", default=-15.0)
parser.add_argument("stop", type=float, help="End angle of sweep. The sweep\
does not include this value.", nargs="?", default=15.0)
parser.add_argument("step", type=float, default=1.0, nargs="?",
help="Spacing between values.")
parser.add_argument("--append", "-a", action="store_true", default=False,
help="Append to previous results")
args = parser.parse_args()
alpha_sweep(args.start, args.stop, args.step, append=args.append)
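A quick check of what the sweep grid looks like, since np.arange excludes the stop value (plain numpy, independent of the OpenFOAM case itself):

import numpy as np

# Default sweep from the argparse defaults above: -15, -14, ..., 14 degrees;
# the stop angle of 15 is not included.
print(np.arange(-15.0, 15.0, 1.0))

# A grid that does reach 15 needs a stop value past it, e.g. alpha_sweep(-15, 16, 5)
# visits -15, -10, -5, 0, 5, 10, 15.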
|
|
a8736df223cefbccd97d392b011fbb511c190569
|
tests/test_create_elb.py
|
tests/test_create_elb.py
|
"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
|
Verify Health Check splaying works
|
tests: Verify Health Check splaying works
See also: PSOBAT-1399
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
tests: Verify Health Check splaying works
See also: PSOBAT-1399
|
"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
|
<commit_before><commit_msg>tests: Verify Health Check splaying works
See also: PSOBAT-1399<commit_after>
|
"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
|
tests: Verify Health Check splaying works
See also: PSOBAT-1399"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
|
<commit_before><commit_msg>tests: Verify Health Check splaying works
See also: PSOBAT-1399<commit_after>"""Test ELB creation functions."""
from foremast.elb.create_elb import SpinnakerELB
def test_splay():
"""Splay should split Health Checks properly."""
health = SpinnakerELB.splay_health('HTTP:80/test')
assert health.path == '/test'
assert health.port == '80'
assert health.proto == 'HTTP'
assert health.target == 'HTTP:80/test'
health = SpinnakerELB.splay_health('TCP:8000/test')
assert health.path == ''
assert health.port == '8000'
assert health.proto == 'TCP'
assert health.target == 'TCP:8000'
health = SpinnakerELB.splay_health('HTTPS:8000/test')
assert health.path == '/test'
assert health.port == '8000'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:8000/test'
health = SpinnakerELB.splay_health('HTTPS:80')
assert health.path == '/healthcheck'
assert health.port == '80'
assert health.proto == 'HTTPS'
assert health.target == 'HTTPS:80/healthcheck'
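For readers who want to see what behaviour these assertions pin down, one possible shape of such a splay function is sketched below (illustrative only; the real SpinnakerELB.splay_health lives in foremast and may be implemented differently):

from collections import namedtuple

HealthCheck = namedtuple("HealthCheck", "path port proto target")

def splay_health(target):
    """Split 'PROTO:PORT[/path]' into its parts, mirroring the tested behaviour."""
    proto, rest = target.split(":", 1)
    port, _, path = rest.partition("/")
    if proto in ("HTTP", "HTTPS"):
        path = "/" + path if path else "/healthcheck"
        return HealthCheck(path, port, proto, "%s:%s%s" % (proto, port, path))
    # TCP-style checks carry no path at all.
    return HealthCheck("", port, proto, "%s:%s" % (proto, port))

assert splay_health("TCP:8000/test").target == "TCP:8000"
assert splay_health("HTTPS:80").target == "HTTPS:80/healthcheck"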
|
|
0e4a97d5bc3e13e2249437c9216b1eae111a480e
|
django_images/migrations/0002_auto_20180826_0814.py
|
django_images/migrations/0002_auto_20180826_0814.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import core.utils
class Migration(migrations.Migration):
dependencies = [
('django_images', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
migrations.AlterField(
model_name='thumbnail',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
]
|
Add migration file for latest version of django_images
|
Fix: Add migration file for latest version of django_images
|
Python
|
bsd-2-clause
|
pinry/pinry,pinry/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry
|
Fix: Add migration file for latest version of django_images
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import core.utils
class Migration(migrations.Migration):
dependencies = [
('django_images', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
migrations.AlterField(
model_name='thumbnail',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
]
|
<commit_before><commit_msg>Fix: Add migration file for latest version of django_images<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import core.utils
class Migration(migrations.Migration):
dependencies = [
('django_images', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
migrations.AlterField(
model_name='thumbnail',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
]
|
Fix: Add migration file for latest version of django_images# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import core.utils
class Migration(migrations.Migration):
dependencies = [
('django_images', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
migrations.AlterField(
model_name='thumbnail',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
]
|
<commit_before><commit_msg>Fix: Add migration file for latest version of django_images<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import core.utils
class Migration(migrations.Migration):
dependencies = [
('django_images', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
migrations.AlterField(
model_name='thumbnail',
name='image',
field=models.ImageField(upload_to=core.utils.upload_path, width_field='width', max_length=255, height_field='height'),
),
]
|
|
86b8ca811f4d582e7820b16e689fd8c28eac656f
|
djconnectwise/migrations/0040_auto_20170926_2145.py
|
djconnectwise/migrations/0040_auto_20170926_2145.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djconnectwise', '0039_auto_20170925_1418'),
]
operations = [
migrations.AlterModelOptions(
name='opportunity',
options={'verbose_name_plural': 'Opportunities', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='opportunitystatus',
options={'verbose_name_plural': 'Opportunity statuses', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='scheduleentry',
options={'verbose_name_plural': 'Schedule entries', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='schedulestatus',
options={'verbose_name_plural': 'Schedule statuses'},
),
]
|
Change minor meta options on models
|
Change minor meta options on models
|
Python
|
mit
|
KerkhoffTechnologies/django-connectwise,KerkhoffTechnologies/django-connectwise
|
Change minor meta options on models
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djconnectwise', '0039_auto_20170925_1418'),
]
operations = [
migrations.AlterModelOptions(
name='opportunity',
options={'verbose_name_plural': 'Opportunities', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='opportunitystatus',
options={'verbose_name_plural': 'Opportunity statuses', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='scheduleentry',
options={'verbose_name_plural': 'Schedule entries', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='schedulestatus',
options={'verbose_name_plural': 'Schedule statuses'},
),
]
|
<commit_before><commit_msg>Change minor meta options on models<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djconnectwise', '0039_auto_20170925_1418'),
]
operations = [
migrations.AlterModelOptions(
name='opportunity',
options={'verbose_name_plural': 'Opportunities', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='opportunitystatus',
options={'verbose_name_plural': 'Opportunity statuses', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='scheduleentry',
options={'verbose_name_plural': 'Schedule entries', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='schedulestatus',
options={'verbose_name_plural': 'Schedule statuses'},
),
]
|
Change minor meta options on models# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djconnectwise', '0039_auto_20170925_1418'),
]
operations = [
migrations.AlterModelOptions(
name='opportunity',
options={'verbose_name_plural': 'Opportunities', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='opportunitystatus',
options={'verbose_name_plural': 'Opportunity statuses', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='scheduleentry',
options={'verbose_name_plural': 'Schedule entries', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='schedulestatus',
options={'verbose_name_plural': 'Schedule statuses'},
),
]
|
<commit_before><commit_msg>Change minor meta options on models<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djconnectwise', '0039_auto_20170925_1418'),
]
operations = [
migrations.AlterModelOptions(
name='opportunity',
options={'verbose_name_plural': 'Opportunities', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='opportunitystatus',
options={'verbose_name_plural': 'Opportunity statuses', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='scheduleentry',
options={'verbose_name_plural': 'Schedule entries', 'ordering': ('name',)},
),
migrations.AlterModelOptions(
name='schedulestatus',
options={'verbose_name_plural': 'Schedule statuses'},
),
]
|
|
5f5f26a9d31c5c647d69e0400e381abd0ec103b0
|
lwr/managers/util/env.py
|
lwr/managers/util/env.py
|
RAW_VALUE_BY_DEFAULT = False
def env_to_statement(env):
''' Return the abstraction description of an environment variable definition
into a statement for shell script.
>>> env_to_statement(dict(name='X', value='Y'))
'X="Y"; export X'
>>> env_to_statement(dict(name='X', value='Y', raw=True))
'X=Y; export X'
>>> env_to_statement(dict(name='X', value='"A","B","C"'))
'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X'
'''
name = env['name']
value = env['value']
raw = env.get('raw', RAW_VALUE_BY_DEFAULT)
if not raw:
value = '"' + value.replace('"', '\\"') + '"'
return '%s=%s; export %s' % (name, value, name)
|
Add missing file from previous commit (thanks Izzet Fatih).
|
Add missing file from previous commit (thanks Izzet Fatih).
|
Python
|
apache-2.0
|
jmchilton/pulsar,ssorgatem/pulsar,ssorgatem/pulsar,natefoo/pulsar,jmchilton/pulsar,galaxyproject/pulsar,jmchilton/lwr,natefoo/pulsar,jmchilton/lwr,galaxyproject/pulsar
|
Add missing file from previous commit (thanks Izzet Fatih).
|
RAW_VALUE_BY_DEFAULT = False
def env_to_statement(env):
''' Return the abstraction description of an environment variable definition
into a statement for shell script.
>>> env_to_statement(dict(name='X', value='Y'))
'X="Y"; export X'
>>> env_to_statement(dict(name='X', value='Y', raw=True))
'X=Y; export X'
>>> env_to_statement(dict(name='X', value='"A","B","C"'))
'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X'
'''
name = env['name']
value = env['value']
raw = env.get('raw', RAW_VALUE_BY_DEFAULT)
if not raw:
value = '"' + value.replace('"', '\\"') + '"'
return '%s=%s; export %s' % (name, value, name)
|
<commit_before><commit_msg>Add missing file from previous commit (thanks Izzet Fatih).<commit_after>
|
RAW_VALUE_BY_DEFAULT = False
def env_to_statement(env):
''' Return the abstraction description of an environment variable definition
into a statement for shell script.
>>> env_to_statement(dict(name='X', value='Y'))
'X="Y"; export X'
>>> env_to_statement(dict(name='X', value='Y', raw=True))
'X=Y; export X'
>>> env_to_statement(dict(name='X', value='"A","B","C"'))
'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X'
'''
name = env['name']
value = env['value']
raw = env.get('raw', RAW_VALUE_BY_DEFAULT)
if not raw:
value = '"' + value.replace('"', '\\"') + '"'
return '%s=%s; export %s' % (name, value, name)
|
Add missing file from previous commit (thanks Izzet Fatih).
RAW_VALUE_BY_DEFAULT = False
def env_to_statement(env):
''' Return the abstraction description of an environment variable definition
into a statement for shell script.
>>> env_to_statement(dict(name='X', value='Y'))
'X="Y"; export X'
>>> env_to_statement(dict(name='X', value='Y', raw=True))
'X=Y; export X'
>>> env_to_statement(dict(name='X', value='"A","B","C"'))
'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X'
'''
name = env['name']
value = env['value']
raw = env.get('raw', RAW_VALUE_BY_DEFAULT)
if not raw:
value = '"' + value.replace('"', '\\"') + '"'
return '%s=%s; export %s' % (name, value, name)
|
<commit_before><commit_msg>Add missing file from previous commit (thanks Izzet Fatih).<commit_after>
RAW_VALUE_BY_DEFAULT = False
def env_to_statement(env):
''' Return the abstraction description of an environment variable definition
into a statement for shell script.
>>> env_to_statement(dict(name='X', value='Y'))
'X="Y"; export X'
>>> env_to_statement(dict(name='X', value='Y', raw=True))
'X=Y; export X'
>>> env_to_statement(dict(name='X', value='"A","B","C"'))
'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X'
'''
name = env['name']
value = env['value']
raw = env.get('raw', RAW_VALUE_BY_DEFAULT)
if not raw:
value = '"' + value.replace('"', '\\"') + '"'
return '%s=%s; export %s' % (name, value, name)
|
|
0ed4c7a502db204a6503dfa43e384d58632f44ab
|
biobox_cli/biobox_file.py
|
biobox_cli/biobox_file.py
|
import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : "ref"}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
|
import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : ref}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
|
Fix type in creation of fasta_dir biobox entry
|
Fix type in creation of fasta_dir biobox entry
|
Python
|
mit
|
michaelbarton/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface,bioboxes/command-line-interface
|
import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : "ref"}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
Fix type in creation of fasta_dir biobox entry
|
import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : ref}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
|
<commit_before>import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : "ref"}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
<commit_msg>Fix type in creation of fasta_dir biobox entry<commit_after>
|
import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : ref}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
|
import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : "ref"}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
Fix type in creation of fasta_dir biobox entryimport os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : ref}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
|
<commit_before>import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : "ref"}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
<commit_msg>Fix type in creation of fasta_dir biobox entry<commit_after>import os
import yaml
def generate(args):
output = {"version" : "0.9.0", "arguments" : args}
return yaml.safe_dump(output, default_flow_style = False)
def parse(dir_):
with open(os.path.join(dir_, 'biobox.yaml'), 'r') as f:
return yaml.load(f.read())
def fastq_arguments(args):
return files_values("fastq", args)
def fasta_arguments(args):
return files_values("fasta", args)
def reference_argument(ref):
return {"fasta_dir": [{"id" : 1, "type" : "reference", "value" : ref}]}
def files_values(identifier, args):
values = [entry(identifier + "_" + str(i), p_c, t) for (i, (p_c, t)) in enumerate(args)]
return {identifier : values}
def entry(id_, value, type_):
return {"id" : id_, "value" : value, "type" : type_}
def create_biobox_directory(content):
import tempfile as tmp
dir_ = tmp.mkdtemp()
with open(os.path.join(dir_, "biobox.yaml"), "w") as f:
f.write(content)
return dir_
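A small sketch of what the corrected reference_argument produces once passed through generate (the path is made up; the output shown follows yaml.safe_dump's default key ordering):

from biobox_cli.biobox_file import generate, reference_argument

# Before the fix, every entry carried the literal string "ref" instead of the caller's path.
print(generate(reference_argument("/data/my_reference.fa")))
# arguments:
#   fasta_dir:
#   - id: 1
#     type: reference
#     value: /data/my_reference.fa
# version: 0.9.0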
|
503944776a5cf0a97355989b255c7beb1e7ce059
|
kaggle/titanic/categorical_and_scaler_prediction.py
|
kaggle/titanic/categorical_and_scaler_prediction.py
|
import pandas
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
train = train_all[['Survived', 'Sex', 'Fare']]
print(train)
if __name__ == '__main__':
main()
|
Read in train data to dataframe
|
Read in train data to dataframe
|
Python
|
mit
|
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
|
Read in train data to dataframe
|
import pandas
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
train = train_all[['Survived', 'Sex', 'Fare']]
print(train)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Read in train data to dataframe<commit_after>
|
import pandas
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
train = train_all[['Survived', 'Sex', 'Fare']]
print(train)
if __name__ == '__main__':
main()
|
Read in train data to dataframeimport pandas
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
train = train_all[['Survived', 'Sex', 'Fare']]
print(train)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Read in train data to dataframe<commit_after>import pandas
def main():
train_all = pandas.DataFrame.from_csv('train.csv')
train = train_all[['Survived', 'Sex', 'Fare']]
print(train)
if __name__ == '__main__':
main()
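A side note for anyone rerunning this today: DataFrame.from_csv was deprecated and later removed from pandas, so on current versions the equivalent read is:

import pandas

# index_col=0 mirrors from_csv's default of using the first column as the index.
train_all = pandas.read_csv('train.csv', index_col=0)
train = train_all[['Survived', 'Sex', 'Fare']]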
|
|
81db70aaab1dc2cd481dd2b5a616fdc251ca21d5
|
src/member/migrations/0033_auto_20170801_1607.py
|
src/member/migrations/0033_auto_20170801_1607.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 20:07
from __future__ import unicode_literals
import annoying.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0032_relationship'),
]
operations = [
migrations.AlterField(
model_name='relationship',
name='extra_fields',
field=annoying.fields.JSONField(blank=True, default={}),
),
migrations.AlterField(
model_name='relationship',
name='remark',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='relationship',
name='type',
field=annoying.fields.JSONField(blank=True, default=[]),
),
]
|
Add the missing migration in the members app.
|
Add the missing migration in the members app.
Ref: issue #791
|
Python
|
agpl-3.0
|
savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast
|
Add the missing migration in the members app.
Ref: issue #791
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 20:07
from __future__ import unicode_literals
import annoying.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0032_relationship'),
]
operations = [
migrations.AlterField(
model_name='relationship',
name='extra_fields',
field=annoying.fields.JSONField(blank=True, default={}),
),
migrations.AlterField(
model_name='relationship',
name='remark',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='relationship',
name='type',
field=annoying.fields.JSONField(blank=True, default=[]),
),
]
|
<commit_before><commit_msg>Add the missing migration in the members app.
Ref: issue #791<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 20:07
from __future__ import unicode_literals
import annoying.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0032_relationship'),
]
operations = [
migrations.AlterField(
model_name='relationship',
name='extra_fields',
field=annoying.fields.JSONField(blank=True, default={}),
),
migrations.AlterField(
model_name='relationship',
name='remark',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='relationship',
name='type',
field=annoying.fields.JSONField(blank=True, default=[]),
),
]
|
Add the missing migration in the members app.
Ref: issue #791# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 20:07
from __future__ import unicode_literals
import annoying.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0032_relationship'),
]
operations = [
migrations.AlterField(
model_name='relationship',
name='extra_fields',
field=annoying.fields.JSONField(blank=True, default={}),
),
migrations.AlterField(
model_name='relationship',
name='remark',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='relationship',
name='type',
field=annoying.fields.JSONField(blank=True, default=[]),
),
]
|
<commit_before><commit_msg>Add the missing migration in the members app.
Ref: issue #791<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 20:07
from __future__ import unicode_literals
import annoying.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('member', '0032_relationship'),
]
operations = [
migrations.AlterField(
model_name='relationship',
name='extra_fields',
field=annoying.fields.JSONField(blank=True, default={}),
),
migrations.AlterField(
model_name='relationship',
name='remark',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='relationship',
name='type',
field=annoying.fields.JSONField(blank=True, default=[]),
),
]
|
|
12939515450702b511ee01c4806f6c97a4c5ae9b
|
orchard/bot_changelog.py
|
orchard/bot_changelog.py
|
"""Command to get the bot's changelog by running git log on the current working directory."""
import asyncio
from asyncio.subprocess import PIPE
from plumeria.command import commands
from plumeria.perms import owners_only
async def get_git_log():
proc = await asyncio.create_subprocess_exec('git', 'log', '--oneline', '--abbrev-commit', '--all', '--graph',
'--decorate', stdin=PIPE, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = await proc.communicate()
if proc.returncode == 0:
return stdout.decode('utf-8', 'ignore').strip().splitlines()
else:
return "unknown"
@commands.create('changelog', category='Utility')
@owners_only
async def changelog(message):
"""
Grab the bot's changelog, derived from the Git repo.
Example::
changelog
"""
log = await get_git_log()
return "```{}```".format("\n".join(log[:10]))
def setup():
commands.add(changelog)
|
Add changelog plugin and command.
|
Add changelog plugin and command.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
Add changelog plugin and command.
|
"""Command to get the bot's changelog by running git log on the current working directory."""
import asyncio
from asyncio.subprocess import PIPE
from plumeria.command import commands
from plumeria.perms import owners_only
async def get_git_log():
proc = await asyncio.create_subprocess_exec('git', 'log', '--oneline', '--abbrev-commit', '--all', '--graph',
'--decorate', stdin=PIPE, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = await proc.communicate()
if proc.returncode == 0:
return stdout.decode('utf-8', 'ignore').strip().splitlines()
else:
return "unknown"
@commands.create('changelog', category='Utility')
@owners_only
async def changelog(message):
"""
Grab the bot's changelog, derived from the Git repo.
Example::
changelog
"""
log = await get_git_log()
return "```{}```".format("\n".join(log[:10]))
def setup():
commands.add(changelog)
|
<commit_before><commit_msg>Add changelog plugin and command.<commit_after>
|
"""Command to get the bot's changelog by running git log on the current working directory."""
import asyncio
from asyncio.subprocess import PIPE
from plumeria.command import commands
from plumeria.perms import owners_only
async def get_git_log():
proc = await asyncio.create_subprocess_exec('git', 'log', '--oneline', '--abbrev-commit', '--all', '--graph',
'--decorate', stdin=PIPE, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = await proc.communicate()
if proc.returncode == 0:
return stdout.decode('utf-8', 'ignore').strip().splitlines()
else:
return "unknown"
@commands.create('changelog', category='Utility')
@owners_only
async def changelog(message):
"""
Grab the bot's changelog, derived from the Git repo.
Example::
changelog
"""
log = await get_git_log()
return "```{}```".format("\n".join(log[:10]))
def setup():
commands.add(changelog)
|
Add changelog plugin and command."""Command to get the bot's changelog by running git log on the current working directory."""
import asyncio
from asyncio.subprocess import PIPE
from plumeria.command import commands
from plumeria.perms import owners_only
async def get_git_log():
proc = await asyncio.create_subprocess_exec('git', 'log', '--oneline', '--abbrev-commit', '--all', '--graph',
'--decorate', stdin=PIPE, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = await proc.communicate()
if proc.returncode == 0:
return stdout.decode('utf-8', 'ignore').strip().splitlines()
else:
return "unknown"
@commands.create('changelog', category='Utility')
@owners_only
async def changelog(message):
"""
Grab the bot's changelog, derived from the Git repo.
Example::
changelog
"""
log = await get_git_log()
return "```{}```".format("\n".join(log[:10]))
def setup():
commands.add(changelog)
|
<commit_before><commit_msg>Add changelog plugin and command.<commit_after>"""Command to get the bot's changelog by running git log on the current working directory."""
import asyncio
from asyncio.subprocess import PIPE
from plumeria.command import commands
from plumeria.perms import owners_only
async def get_git_log():
proc = await asyncio.create_subprocess_exec('git', 'log', '--oneline', '--abbrev-commit', '--all', '--graph',
'--decorate', stdin=PIPE, stdout=PIPE, stderr=PIPE)
(stdout, stderr) = await proc.communicate()
if proc.returncode == 0:
return stdout.decode('utf-8', 'ignore').strip().splitlines()
else:
return "unknown"
@commands.create('changelog', category='Utility')
@owners_only
async def changelog(message):
"""
Grab the bot's changelog, derived from the Git repo.
Example::
changelog
"""
log = await get_git_log()
return "```{}```".format("\n".join(log[:10]))
def setup():
commands.add(changelog)
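One detail worth flagging in the command above: on a non-zero git exit status, get_git_log returns the bare string "unknown", and the caller's "\n".join(log[:10]) would then print one character per line. A small self-contained demonstration (the list-returning variant is a suggestion, not part of the commit):

# What the current error path produces:
log = "unknown"
print("```{}```".format("\n".join(log[:10])))   # u, n, k, ... one character per line

# Returning a one-element list instead keeps the caller's join well-behaved:
log = ["unknown"]
print("```{}```".format("\n".join(log[:10])))   # ```unknown```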
|
|
63eec63e79b9eb21d4f2c23d683c85212beb49f0
|
usr/examples/04-Image-Filters/kernel_filters.py
|
usr/examples/04-Image-Filters/kernel_filters.py
|
# Kernel Filtering Example
#
# This example shows off how to use a generic kernel filter.
import sensor, image, time
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take effect.
clock = time.clock() # Tracks FPS.
kernel_size = 1 # 3x3==1, 5x5==2, 7x7==3, etc.
kernel = [-2, -1, 0, \
-1, 1, 1, \
0, 1, 2]
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
# Run the kernel on every pixel of the image.
img.morph(kernel_size, kernel)
print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
# connected to your computer. The FPS should increase once disconnected.
|
Add in a demo for embossing.
|
Add in a demo for embossing.
|
Python
|
mit
|
iabdalkader/openmv,iabdalkader/openmv,openmv/openmv,iabdalkader/openmv,kwagyeman/openmv,openmv/openmv,openmv/openmv,kwagyeman/openmv,kwagyeman/openmv,openmv/openmv,iabdalkader/openmv,kwagyeman/openmv
|
Add in a demo for embossing.
|
# Kernel Filtering Example
#
# This example shows off how to use a generic kernel filter.
import sensor, image, time
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take effect.
clock = time.clock() # Tracks FPS.
kernel_size = 1 # 3x3==1, 5x5==2, 7x7==3, etc.
kernel = [-2, -1, 0, \
-1, 1, 1, \
0, 1, 2]
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
# Run the kernel on every pixel of the image.
img.morph(kernel_size, kernel)
print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
# connected to your computer. The FPS should increase once disconnected.
|
<commit_before><commit_msg>Add in a demo for embossing.<commit_after>
|
# Kernel Filtering Example
#
# This example shows off how to use a generic kernel filter.
import sensor, image, time
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take affect.
clock = time.clock() # Tracks FPS.
kernel_size = 1 # 3x3==1, 5x5==2, 7x7==3, etc.
kernel = [-2, -1, 0, \
-1, 1, 1, \
0, 1, 2]
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
# Run the kernel on every pixel of the image.
img.morph(kernel_size, kernel)
print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
# connected to your computer. The FPS should increase once disconnected.
|
Add in a demo for embossing.# Kernel Filtering Example
#
# This example shows off how to use a generic kernel filter.
import sensor, image, time
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take effect.
clock = time.clock() # Tracks FPS.
kernel_size = 1 # 3x3==1, 5x5==2, 7x7==3, etc.
kernel = [-2, -1, 0, \
-1, 1, 1, \
0, 1, 2]
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
# Run the kernel on every pixel of the image.
img.morph(kernel_size, kernel)
print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
# connected to your computer. The FPS should increase once disconnected.
|
<commit_before><commit_msg>Add in a demo for embossing.<commit_after># Kernel Filtering Example
#
# This example shows off how to use a generic kernel filter.
import sensor, image, time
sensor.reset() # Initialize the camera sensor.
sensor.set_pixformat(sensor.GRAYSCALE) # or sensor.RGB565
sensor.set_framesize(sensor.QVGA) # or sensor.QQVGA (or others)
sensor.skip_frames(time = 2000) # Let new settings take effect.
clock = time.clock() # Tracks FPS.
kernel_size = 1 # 3x3==1, 5x5==2, 7x7==3, etc.
kernel = [-2, -1, 0, \
-1, 1, 1, \
0, 1, 2]
while(True):
clock.tick() # Track elapsed milliseconds between snapshots().
img = sensor.snapshot() # Take a picture and return the image.
# Run the kernel on every pixel of the image.
img.morph(kernel_size, kernel)
print(clock.fps()) # Note: Your OpenMV Cam runs about half as fast while
# connected to your computer. The FPS should increase once disconnected.
|
|
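The record above adds an embossing kernel; since img.morph() accepts any odd-sized kernel, a sharpening variant is sketched below. This is an assumption-laden illustration: it needs an OpenMV camera (MicroPython with the sensor module), and the kernel values are a common 3x3 sharpen, not something taken from the commit.

# Hypothetical variation on the demo above: same pipeline, 3x3 sharpen kernel.
import sensor, time

sensor.reset()
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.set_framesize(sensor.QVGA)
sensor.skip_frames(time=2000)
clock = time.clock()

kernel_size = 1           # 3x3 kernel
sharpen = [ 0, -1,  0,
           -1,  5, -1,
            0, -1,  0]

while True:
    clock.tick()
    img = sensor.snapshot()
    img.morph(kernel_size, sharpen)   # apply the sharpen kernel to every pixel
    print(clock.fps())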
3a2bac74716b6e15ee04322839f342a4609356f0
|
enthought/pyface/ipython_widget.py
|
enthought/pyface/ipython_widget.py
|
#------------------------------------------------------------------------------
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" The implementation of an IPython shell. """
# Import the toolkit specific version.
try:
import IPython.frontend
except ImportError:
raise ImportError, '''
________________________________________________________________________________
Could not load the Wx frontend for ipython.
You need to have ipython >= 0.9 installed to use the ipython widget.'''
from toolkit import toolkit_object
IPythonWidget= toolkit_object('ipython_widget:IPythonWidget')
#### EOF ######################################################################
|
Add an ipython pyface widget.
|
Add an ipython pyface widget.
|
Python
|
bsd-3-clause
|
brett-patterson/pyface,geggo/pyface,geggo/pyface,enthought/traitsgui,pankajp/pyface
|
Add an ipython pyface widget.
|
#------------------------------------------------------------------------------
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" The implementation of an IPython shell. """
# Import the toolkit specific version.
try:
import IPython.frontend
except ImportError:
raise ImportError, '''
________________________________________________________________________________
Could not load the Wx frontend for ipython.
You need to have ipython >= 0.9 installed to use the ipython widget.'''
from toolkit import toolkit_object
IPythonWidget= toolkit_object('ipython_widget:IPythonWidget')
#### EOF ######################################################################
|
<commit_before><commit_msg>Add an ipython pyface widget.<commit_after>
|
#------------------------------------------------------------------------------
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" The implementation of an IPython shell. """
# Import the toolkit specific version.
try:
import IPython.frontend
except ImportError:
raise ImportError, '''
________________________________________________________________________________
Could not load the Wx frontend for ipython.
You need to have ipython >= 0.9 installed to use the ipython widget.'''
from toolkit import toolkit_object
IPythonWidget= toolkit_object('ipython_widget:IPythonWidget')
#### EOF ######################################################################
|
Add an ipython pyface widget.#------------------------------------------------------------------------------
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" The implementation of an IPython shell. """
# Import the toolkit specific version.
try:
import IPython.frontend
except ImportError:
raise ImportError, '''
________________________________________________________________________________
Could not load the Wx frontend for ipython.
You need to have ipython >= 0.9 installed to use the ipython widget.'''
from toolkit import toolkit_object
IPythonWidget= toolkit_object('ipython_widget:IPythonWidget')
#### EOF ######################################################################
|
<commit_before><commit_msg>Add an ipython pyface widget.<commit_after>#------------------------------------------------------------------------------
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" The implementation of an IPython shell. """
# Import the toolkit specific version.
try:
import IPython.frontend
except ImportError:
raise ImportError, '''
________________________________________________________________________________
Could not load the Wx frontend for ipython.
You need to have ipython >= 0.9 installed to use the ipython widget.'''
from toolkit import toolkit_object
IPythonWidget= toolkit_object('ipython_widget:IPythonWidget')
#### EOF ######################################################################
|
|
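The widget record above guards an optional IPython dependency using Python 2 raise syntax; purely as an illustration (not part of pyface), the same guard in Python 3 style could look like this.

# Illustrative Python 3 form of the optional-dependency guard shown above.
try:
    import IPython.frontend  # noqa: F401 -- only checking that it is importable
except ImportError as exc:
    raise ImportError(
        "Could not load the Wx frontend for ipython.\n"
        "You need to have ipython >= 0.9 installed to use the ipython widget."
    ) from exc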
59200b241ed46f2427e0f10ff3f57fed4f919d02
|
migrations/versions/0328_international_letters_perm.py
|
migrations/versions/0328_international_letters_perm.py
|
"""
Revision ID: 0328_international_letters_perm
Revises: 0327_idx_notification_history
Create Date: 2020-08-10 14:12:02.870838
"""
from alembic import op
from sqlalchemy import text
revision = '0328_international_letters_perm'
down_revision = '0327_idx_notification_history'
def upgrade():
sql = """
SELECT distinct(service_id) service_id
FROM service_permissions
WHERE service_id not in (SELECT service_id FROM service_permissions WHERE permission = 'international_letters')
"""
insert_sql = """
INSERT INTO service_permissions(service_id, permission, created_at)
VALUES (:service_id, 'international_letters', now())
"""
conn = op.get_bind()
results = conn.execute(sql)
services_to_add_permission = results.fetchall()
for x in services_to_add_permission:
conn.execute(text(insert_sql), service_id=x.service_id)
def downgrade():
pass
|
Add international_letters service permission for all services.
|
Add international_letters service permission for all services.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add international_letters service permission for all services.
|
"""
Revision ID: 0328_international_letters_perm
Revises: 0327_idx_notification_history
Create Date: 2020-08-10 14:12:02.870838
"""
from alembic import op
from sqlalchemy import text
revision = '0328_international_letters_perm'
down_revision = '0327_idx_notification_history'
def upgrade():
sql = """
SELECT distinct(service_id) service_id
FROM service_permissions
WHERE service_id not in (SELECT service_id FROM service_permissions WHERE permission = 'international_letters')
"""
insert_sql = """
INSERT INTO service_permissions(service_id, permission, created_at)
VALUES (:service_id, 'international_letters', now())
"""
conn = op.get_bind()
results = conn.execute(sql)
services_to_add_permission = results.fetchall()
for x in services_to_add_permission:
conn.execute(text(insert_sql), service_id=x.service_id)
def downgrade():
pass
|
<commit_before><commit_msg>Add international_letters service permission for all services.<commit_after>
|
"""
Revision ID: 0328_international_letters_perm
Revises: 0327_idx_notification_history
Create Date: 2020-08-10 14:12:02.870838
"""
from alembic import op
from sqlalchemy import text
revision = '0328_international_letters_perm'
down_revision = '0327_idx_notification_history'
def upgrade():
sql = """
SELECT distinct(service_id) service_id
FROM service_permissions
WHERE service_id not in (SELECT service_id FROM service_permissions WHERE permission = 'international_letters')
"""
insert_sql = """
INSERT INTO service_permissions(service_id, permission, created_at)
VALUES (:service_id, 'international_letters', now())
"""
conn = op.get_bind()
results = conn.execute(sql)
services_to_add_permission = results.fetchall()
for x in services_to_add_permission:
conn.execute(text(insert_sql), service_id=x.service_id)
def downgrade():
pass
|
Add international_letters service permission for all services."""
Revision ID: 0328_international_letters_perm
Revises: 0327_idx_notification_history
Create Date: 2020-08-10 14:12:02.870838
"""
from alembic import op
from sqlalchemy import text
revision = '0328_international_letters_perm'
down_revision = '0327_idx_notification_history'
def upgrade():
sql = """
SELECT distinct(service_id) service_id
FROM service_permissions
WHERE service_id not in (SELECT service_id FROM service_permissions WHERE permission = 'international_letters')
"""
insert_sql = """
INSERT INTO service_permissions(service_id, permission, created_at)
VALUES (:service_id, 'international_letters', now())
"""
conn = op.get_bind()
results = conn.execute(sql)
services_to_add_permission = results.fetchall()
for x in services_to_add_permission:
conn.execute(text(insert_sql), service_id=x.service_id)
def downgrade():
pass
|
<commit_before><commit_msg>Add international_letters service permission for all services.<commit_after>"""
Revision ID: 0328_international_letters_perm
Revises: 0327_idx_notification_history
Create Date: 2020-08-10 14:12:02.870838
"""
from alembic import op
from sqlalchemy import text
revision = '0328_international_letters_perm'
down_revision = '0327_idx_notification_history'
def upgrade():
sql = """
SELECT distinct(service_id) service_id
FROM service_permissions
WHERE service_id not in (SELECT service_id FROM service_permissions WHERE permission = 'international_letters')
"""
insert_sql = """
INSERT INTO service_permissions(service_id, permission, created_at)
VALUES (:service_id, 'international_letters', now())
"""
conn = op.get_bind()
results = conn.execute(sql)
services_to_add_permission = results.fetchall()
for x in services_to_add_permission:
conn.execute(text(insert_sql), service_id=x.service_id)
def downgrade():
pass
|
|
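The upgrade above gathers the affected services in Python and inserts one row per service; an equivalent set-based alternative is sketched below. This is only an illustration of the idea, not the migration that shipped, and it has not been run against the Notify schema.

# Hypothetical single-statement variant of the upgrade step above.
from alembic import op
from sqlalchemy import text

def upgrade():
    conn = op.get_bind()
    conn.execute(text("""
        INSERT INTO service_permissions (service_id, permission, created_at)
        SELECT DISTINCT service_id, 'international_letters', now()
        FROM service_permissions
        WHERE service_id NOT IN (
            SELECT service_id FROM service_permissions
            WHERE permission = 'international_letters')
    """))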
de438b1548fcfd995a65fd5937de5b86dccc5eff
|
tools/del_stage.py
|
tools/del_stage.py
|
import argparse
import os
import sys
import sqlitedict
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--state",
help="file to read/write action state"
" information into/from (default=%(default)s)",
default=os.path.join(os.getcwd(), "state.sqlite"),
metavar="PATH")
parser.add_argument("-s", "--stage",
help="stage name to delete/drop",
default=None, required=True)
parser.add_argument("-t", "--table",
help="table name to use",
default=None, required=True)
args = parser.parse_args()
with sqlitedict.SqliteDict(filename=args.state, flag='c',
tablename=args.table,
autocommit=False) as tracker:
try:
del tracker[args.stage]
except KeyError:
print("Stage '%s' not found." % args.stage)
sys.exit(1)
else:
print("Stage '%s' removed." % args.stage)
tracker.sync()
if __name__ == '__main__':
main()
|
Add tool to drop a specific stage (for rerunning)
|
Add tool to drop a specific stage (for rerunning)
|
Python
|
apache-2.0
|
harlowja/multi-devstack,harlowja/multi-devstack
|
Add tool to drop a specific stage (for rerunning)
|
import argparse
import os
import sys
import sqlitedict
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--state",
help="file to read/write action state"
" information into/from (default=%(default)s)",
default=os.path.join(os.getcwd(), "state.sqlite"),
metavar="PATH")
parser.add_argument("-s", "--stage",
help="stage name to delete/drop",
default=None, required=True)
parser.add_argument("-t", "--table",
help="table name to use",
default=None, required=True)
args = parser.parse_args()
with sqlitedict.SqliteDict(filename=args.state, flag='c',
tablename=args.table,
autocommit=False) as tracker:
try:
del tracker[args.stage]
except KeyError:
print("Stage '%s' not found." % args.stage)
sys.exit(1)
else:
print("Stage '%s' removed." % args.stage)
tracker.sync()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tool to drop a specific stage (for rerunning)<commit_after>
|
import argparse
import os
import sys
import sqlitedict
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--state",
help="file to read/write action state"
" information into/from (default=%(default)s)",
default=os.path.join(os.getcwd(), "state.sqlite"),
metavar="PATH")
parser.add_argument("-s", "--stage",
help="stage name to delete/drop",
default=None, required=True)
parser.add_argument("-t", "--table",
help="table name to use",
default=None, required=True)
args = parser.parse_args()
with sqlitedict.SqliteDict(filename=args.state, flag='c',
tablename=args.table,
autocommit=False) as tracker:
try:
del tracker[args.stage]
except KeyError:
print("Stage '%s' not found." % args.stage)
sys.exit(1)
else:
print("Stage '%s' removed." % args.stage)
tracker.sync()
if __name__ == '__main__':
main()
|
Add tool to drop a specific stage (for rerunning)import argparse
import os
import sys
import sqlitedict
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--state",
help="file to read/write action state"
" information into/from (default=%(default)s)",
default=os.path.join(os.getcwd(), "state.sqlite"),
metavar="PATH")
parser.add_argument("-s", "--stage",
help="stage name to delete/drop",
default=None, required=True)
parser.add_argument("-t", "--table",
help="table name to use",
default=None, required=True)
args = parser.parse_args()
with sqlitedict.SqliteDict(filename=args.state, flag='c',
tablename=args.table,
autocommit=False) as tracker:
try:
del tracker[args.stage]
except KeyError:
print("Stage '%s' not found." % args.stage)
sys.exit(1)
else:
print("Stage '%s' removed." % args.stage)
tracker.sync()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add tool to drop a specific stage (for rerunning)<commit_after>import argparse
import os
import sys
import sqlitedict
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--state",
help="file to read/write action state"
" information into/from (default=%(default)s)",
default=os.path.join(os.getcwd(), "state.sqlite"),
metavar="PATH")
parser.add_argument("-s", "--stage",
help="stage name to delete/drop",
default=None, required=True)
parser.add_argument("-t", "--table",
help="table name to use",
default=None, required=True)
args = parser.parse_args()
with sqlitedict.SqliteDict(filename=args.state, flag='c',
tablename=args.table,
autocommit=False) as tracker:
try:
del tracker[args.stage]
except KeyError:
print("Stage '%s' not found." % args.stage)
sys.exit(1)
else:
print("Stage '%s' removed." % args.stage)
tracker.sync()
if __name__ == '__main__':
main()
|
|
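As a companion to the drop tool above, a small read-only helper that lists the stages stored in a state file is sketched below; the helper name and the default table name are assumptions for illustration, not part of the repository.

# Hypothetical companion to del_stage.py: list stages recorded in a state file.
import os
import sqlitedict

def list_stages(state=os.path.join(os.getcwd(), "state.sqlite"), table="default"):
    # flag='r' opens the sqlite file read-only so nothing can be modified here.
    with sqlitedict.SqliteDict(filename=state, flag='r', tablename=table) as tracker:
        return sorted(tracker.keys())

if __name__ == '__main__':
    for stage in list_stages():
        print(stage)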
32995e41cf8e465cf7c2e6ec2ec7c17c36b6d031
|
letsmeet/events/migrations/0010_event_max_attendees.py
|
letsmeet/events/migrations/0010_event_max_attendees.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-18 21:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0009_auto_20160107_2144'),
]
operations = [
migrations.AddField(
model_name='event',
name='max_attendees',
field=models.PositiveIntegerField(
help_text='Optional maximum number of attendees for this event. Leave blank for no limit.', null=True),
),
]
|
Add migration for new model field.
|
Add migration for new model field.
|
Python
|
mit
|
letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click
|
Add migration for new model field.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-18 21:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0009_auto_20160107_2144'),
]
operations = [
migrations.AddField(
model_name='event',
name='max_attendees',
field=models.PositiveIntegerField(
help_text='Optional maximum number of attendees for this event. Leave blank for no limit.', null=True),
),
]
|
<commit_before><commit_msg>Add migration for new model field.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-18 21:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0009_auto_20160107_2144'),
]
operations = [
migrations.AddField(
model_name='event',
name='max_attendees',
field=models.PositiveIntegerField(
help_text='Optional maximum number of attendees for this event. Leave blank for no limit.', null=True),
),
]
|
Add migration for new model field.# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-18 21:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0009_auto_20160107_2144'),
]
operations = [
migrations.AddField(
model_name='event',
name='max_attendees',
field=models.PositiveIntegerField(
help_text='Optional maximum number of attendees for this event. Leave blank for no limit.', null=True),
),
]
|
<commit_before><commit_msg>Add migration for new model field.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-18 21:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0009_auto_20160107_2144'),
]
operations = [
migrations.AddField(
model_name='event',
name='max_attendees',
field=models.PositiveIntegerField(
help_text='Optional maximum number of attendees for this event. Leave blank for no limit.', null=True),
),
]
|
|
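For context, the migration above corresponds to a model field roughly like the one below; this is a sketch inferred from the AddField operation, not the actual letsmeet.click model source.

# Sketch of the Event field implied by the migration above.
from django.db import models

class Event(models.Model):
    max_attendees = models.PositiveIntegerField(
        help_text='Optional maximum number of attendees for this event. '
                  'Leave blank for no limit.',
        null=True,   # matches the migration; forms would typically add blank=True too
    )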
3460a627b535a55eedefb7ec5a37fe068f3d7abd
|
tests/fixtures/postgres.py
|
tests/fixtures/postgres.py
|
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
|
Add fixtures for connecting to Postgres test database
|
Add fixtures for connecting to Postgres test database
|
Python
|
mit
|
igboyes/virtool,igboyes/virtool,virtool/virtool,virtool/virtool
|
Add fixtures for connecting to Postgres test database
|
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
|
<commit_before><commit_msg>Add fixtures for connecting to Postgres test database<commit_after>
|
import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
|
Add fixtures for connecting to Postgres test databaseimport pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
|
<commit_before><commit_msg>Add fixtures for connecting to Postgres test database<commit_after>import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close()
|
|
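A test consuming the dbsession fixture above might look like the sketch below. The Label model and its columns are invented here for illustration; only the Base import mirrors what the fixtures themselves use.

# Hypothetical async test built on the fixtures above.
from sqlalchemy import Column, Integer, String, select
from virtool.models import Base   # same declarative base the fixtures drop/create

class Label(Base):                 # illustrative model, not part of virtool
    __tablename__ = "example_labels"
    id = Column(Integer, primary_key=True)
    name = Column(String)

async def test_label_roundtrip(dbsession):
    dbsession.add(Label(name="Bug"))
    await dbsession.commit()
    result = await dbsession.execute(select(Label).where(Label.name == "Bug"))
    assert result.scalars().one().name == "Bug"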
8d7b83f93818779225681746cf06f6f1b0164330
|
raiden/tests/unit/transfer/test_state_diff.py
|
raiden/tests/unit/transfer/test_state_diff.py
|
import random
from copy import deepcopy
from raiden.transfer.state import (
ChainState,
NettingChannelEndState,
NettingChannelState,
PaymentNetworkState,
TokenNetworkState,
TransactionExecutionStatus,
)
from raiden.transfer.views import detect_balance_proof_change
def test_detect_balance_proof_change():
prng = random.Random()
old = ChainState(prng, 1, 2, 3)
new = ChainState(prng, 1, 2, 3)
def diff():
return list(detect_balance_proof_change(old, new))
assert len(diff()) == 0
payment_network = PaymentNetworkState(b'x', [])
payment_network_copy = deepcopy(payment_network)
new.identifiers_to_paymentnetworks['a'] = payment_network
assert len(diff()) == 0
token_network = TokenNetworkState(b'a', b'a')
token_network_copy = deepcopy(token_network)
payment_network.tokenidentifiers_to_tokennetworks['a'] = token_network
assert len(diff()) == 0
channel = NettingChannelState(
1,
0,
b'a',
1,
1,
1,
2,
None,
None,
TransactionExecutionStatus(result='success'),
)
channel_copy = deepcopy(channel)
token_network.channelidentifiers_to_channels['a'] = channel
partner_state = NettingChannelEndState(b'a', 0)
partner_state_copy = deepcopy(partner_state)
channel.partner_state = partner_state
assert len(diff()) == 0
balance_proof = object()
partner_state.balance_proof = balance_proof
assert len(diff()) == 1
old.identifiers_to_paymentnetworks['a'] = payment_network_copy
assert len(diff()) == 1
payment_network_copy.tokenidentifiers_to_tokennetworks['a'] = token_network_copy
assert len(diff()) == 1
token_network_copy.channelidentifiers_to_channels['a'] = channel_copy
channel_copy.partner_state = partner_state_copy
assert len(diff()) == 1
channel_copy.partner_state.balance_proof = balance_proof
assert len(diff()) == 0
channel_copy.partner_state.balance_proof = object()
assert len(diff()) == 1
assert diff() == [balance_proof]
|
Add unit test for state diff method
|
Add unit test for state diff method
|
Python
|
mit
|
hackaugusto/raiden,hackaugusto/raiden
|
Add unit test for state diff method
|
import random
from copy import deepcopy
from raiden.transfer.state import (
ChainState,
NettingChannelEndState,
NettingChannelState,
PaymentNetworkState,
TokenNetworkState,
TransactionExecutionStatus,
)
from raiden.transfer.views import detect_balance_proof_change
def test_detect_balance_proof_change():
prng = random.Random()
old = ChainState(prng, 1, 2, 3)
new = ChainState(prng, 1, 2, 3)
def diff():
return list(detect_balance_proof_change(old, new))
assert len(diff()) == 0
payment_network = PaymentNetworkState(b'x', [])
payment_network_copy = deepcopy(payment_network)
new.identifiers_to_paymentnetworks['a'] = payment_network
assert len(diff()) == 0
token_network = TokenNetworkState(b'a', b'a')
token_network_copy = deepcopy(token_network)
payment_network.tokenidentifiers_to_tokennetworks['a'] = token_network
assert len(diff()) == 0
channel = NettingChannelState(
1,
0,
b'a',
1,
1,
1,
2,
None,
None,
TransactionExecutionStatus(result='success'),
)
channel_copy = deepcopy(channel)
token_network.channelidentifiers_to_channels['a'] = channel
partner_state = NettingChannelEndState(b'a', 0)
partner_state_copy = deepcopy(partner_state)
channel.partner_state = partner_state
assert len(diff()) == 0
balance_proof = object()
partner_state.balance_proof = balance_proof
assert len(diff()) == 1
old.identifiers_to_paymentnetworks['a'] = payment_network_copy
assert len(diff()) == 1
payment_network_copy.tokenidentifiers_to_tokennetworks['a'] = token_network_copy
assert len(diff()) == 1
token_network_copy.channelidentifiers_to_channels['a'] = channel_copy
channel_copy.partner_state = partner_state_copy
assert len(diff()) == 1
channel_copy.partner_state.balance_proof = balance_proof
assert len(diff()) == 0
channel_copy.partner_state.balance_proof = object()
assert len(diff()) == 1
assert diff() == [balance_proof]
|
<commit_before><commit_msg>Add unit test for state diff method<commit_after>
|
import random
from copy import deepcopy
from raiden.transfer.state import (
ChainState,
NettingChannelEndState,
NettingChannelState,
PaymentNetworkState,
TokenNetworkState,
TransactionExecutionStatus,
)
from raiden.transfer.views import detect_balance_proof_change
def test_detect_balance_proof_change():
prng = random.Random()
old = ChainState(prng, 1, 2, 3)
new = ChainState(prng, 1, 2, 3)
def diff():
return list(detect_balance_proof_change(old, new))
assert len(diff()) == 0
payment_network = PaymentNetworkState(b'x', [])
payment_network_copy = deepcopy(payment_network)
new.identifiers_to_paymentnetworks['a'] = payment_network
assert len(diff()) == 0
token_network = TokenNetworkState(b'a', b'a')
token_network_copy = deepcopy(token_network)
payment_network.tokenidentifiers_to_tokennetworks['a'] = token_network
assert len(diff()) == 0
channel = NettingChannelState(
1,
0,
b'a',
1,
1,
1,
2,
None,
None,
TransactionExecutionStatus(result='success'),
)
channel_copy = deepcopy(channel)
token_network.channelidentifiers_to_channels['a'] = channel
partner_state = NettingChannelEndState(b'a', 0)
partner_state_copy = deepcopy(partner_state)
channel.partner_state = partner_state
assert len(diff()) == 0
balance_proof = object()
partner_state.balance_proof = balance_proof
assert len(diff()) == 1
old.identifiers_to_paymentnetworks['a'] = payment_network_copy
assert len(diff()) == 1
payment_network_copy.tokenidentifiers_to_tokennetworks['a'] = token_network_copy
assert len(diff()) == 1
token_network_copy.channelidentifiers_to_channels['a'] = channel_copy
channel_copy.partner_state = partner_state_copy
assert len(diff()) == 1
channel_copy.partner_state.balance_proof = balance_proof
assert len(diff()) == 0
channel_copy.partner_state.balance_proof = object()
assert len(diff()) == 1
assert diff() == [balance_proof]
|
Add unit test for state diff methodimport random
from copy import deepcopy
from raiden.transfer.state import (
ChainState,
NettingChannelEndState,
NettingChannelState,
PaymentNetworkState,
TokenNetworkState,
TransactionExecutionStatus,
)
from raiden.transfer.views import detect_balance_proof_change
def test_detect_balance_proof_change():
prng = random.Random()
old = ChainState(prng, 1, 2, 3)
new = ChainState(prng, 1, 2, 3)
def diff():
return list(detect_balance_proof_change(old, new))
assert len(diff()) == 0
payment_network = PaymentNetworkState(b'x', [])
payment_network_copy = deepcopy(payment_network)
new.identifiers_to_paymentnetworks['a'] = payment_network
assert len(diff()) == 0
token_network = TokenNetworkState(b'a', b'a')
token_network_copy = deepcopy(token_network)
payment_network.tokenidentifiers_to_tokennetworks['a'] = token_network
assert len(diff()) == 0
channel = NettingChannelState(
1,
0,
b'a',
1,
1,
1,
2,
None,
None,
TransactionExecutionStatus(result='success'),
)
channel_copy = deepcopy(channel)
token_network.channelidentifiers_to_channels['a'] = channel
partner_state = NettingChannelEndState(b'a', 0)
partner_state_copy = deepcopy(partner_state)
channel.partner_state = partner_state
assert len(diff()) == 0
balance_proof = object()
partner_state.balance_proof = balance_proof
assert len(diff()) == 1
old.identifiers_to_paymentnetworks['a'] = payment_network_copy
assert len(diff()) == 1
payment_network_copy.tokenidentifiers_to_tokennetworks['a'] = token_network_copy
assert len(diff()) == 1
token_network_copy.channelidentifiers_to_channels['a'] = channel_copy
channel_copy.partner_state = partner_state_copy
assert len(diff()) == 1
channel_copy.partner_state.balance_proof = balance_proof
assert len(diff()) == 0
channel_copy.partner_state.balance_proof = object()
assert len(diff()) == 1
assert diff() == [balance_proof]
|
<commit_before><commit_msg>Add unit test for state diff method<commit_after>import random
from copy import deepcopy
from raiden.transfer.state import (
ChainState,
NettingChannelEndState,
NettingChannelState,
PaymentNetworkState,
TokenNetworkState,
TransactionExecutionStatus,
)
from raiden.transfer.views import detect_balance_proof_change
def test_detect_balance_proof_change():
prng = random.Random()
old = ChainState(prng, 1, 2, 3)
new = ChainState(prng, 1, 2, 3)
def diff():
return list(detect_balance_proof_change(old, new))
assert len(diff()) == 0
payment_network = PaymentNetworkState(b'x', [])
payment_network_copy = deepcopy(payment_network)
new.identifiers_to_paymentnetworks['a'] = payment_network
assert len(diff()) == 0
token_network = TokenNetworkState(b'a', b'a')
token_network_copy = deepcopy(token_network)
payment_network.tokenidentifiers_to_tokennetworks['a'] = token_network
assert len(diff()) == 0
channel = NettingChannelState(
1,
0,
b'a',
1,
1,
1,
2,
None,
None,
TransactionExecutionStatus(result='success'),
)
channel_copy = deepcopy(channel)
token_network.channelidentifiers_to_channels['a'] = channel
partner_state = NettingChannelEndState(b'a', 0)
partner_state_copy = deepcopy(partner_state)
channel.partner_state = partner_state
assert len(diff()) == 0
balance_proof = object()
partner_state.balance_proof = balance_proof
assert len(diff()) == 1
old.identifiers_to_paymentnetworks['a'] = payment_network_copy
assert len(diff()) == 1
payment_network_copy.tokenidentifiers_to_tokennetworks['a'] = token_network_copy
assert len(diff()) == 1
token_network_copy.channelidentifiers_to_channels['a'] = channel_copy
channel_copy.partner_state = partner_state_copy
assert len(diff()) == 1
channel_copy.partner_state.balance_proof = balance_proof
assert len(diff()) == 0
channel_copy.partner_state.balance_proof = object()
assert len(diff()) == 1
assert diff() == [balance_proof]
|
|
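The test above asserts on a function that compares two nested chain states and yields balance proofs present in the new state but not the old; a deliberately simplified, standalone sketch of that comparison idea follows. It is not raiden's implementation, just an illustration of what the assertions check.

# Simplified illustration of the comparison the test exercises (not raiden code).
def detect_changes(old_channels, new_channels):
    for key, new_channel in new_channels.items():
        old_channel = old_channels.get(key, {})
        new_proof = new_channel.get("balance_proof")
        if new_proof is not None and new_proof is not old_channel.get("balance_proof"):
            yield new_proof

proof = object()
old = {"chan": {"balance_proof": None}}
new = {"chan": {"balance_proof": proof}}
assert list(detect_changes(old, new)) == [proof]
assert list(detect_changes(new, new)) == []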
1cdb85e1b9ae941602731c7799a60282d6e44e97
|
tools/analyze_pipestats.py
|
tools/analyze_pipestats.py
|
#!/usr/bin/python
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
from matplotlib import pyplot as plt
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('window', 100, 'Size of rolling average window')
COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'indigo']
def max_default(seq, default):
try:
return np.max(seq)
except ValueError:
return default
def main(argv):
max_val = 0
max_key = ""
n = 0
for fn in argv[1:]:
name = os.path.basename(fn)
xs = np.loadtxt(fn, dtype=np.int)
ys = np.arange(len(xs))
delta = ys - np.array([max_default(ys[xs < x - FLAGS.window], np.NaN)
for x in xs])
max_delta = np.nanmax(delta)
if max_delta > max_val:
max_val = max_delta
max_key = name
plt.plot(xs, delta, color=COLORS[n % len(COLORS)], label=name)
print xs, delta
n += 1
print "Max delta %d in %s" % (max_val, max_key)
print "Buffer size: %d KB" % (max_val * 4)
plt.legend()
plt.show()
if __name__ == '__main__':
app.run(main)
|
Add script to visualize pipestats.
|
Add script to visualize pipestats.
Bug: 78765090
Change-Id: If95a59a7020a835968aa9fdf1660b97b4ed6bb4e
|
Python
|
apache-2.0
|
google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto
|
Add script to visualize pipestats.
Bug: 78765090
Change-Id: If95a59a7020a835968aa9fdf1660b97b4ed6bb4e
|
#!/usr/bin/python
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
from matplotlib import pyplot as plt
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('window', 100, 'Size of rolling average window')
COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'indigo']
def max_default(seq, default):
try:
return np.max(seq)
except ValueError:
return default
def main(argv):
max_val = 0
max_key = ""
n = 0
for fn in argv[1:]:
name = os.path.basename(fn)
xs = np.loadtxt(fn, dtype=np.int)
ys = np.arange(len(xs))
delta = ys - np.array([max_default(ys[xs < x - FLAGS.window], np.NaN)
for x in xs])
max_delta = np.nanmax(delta)
if max_delta > max_val:
max_val = max_delta
max_key = name
plt.plot(xs, delta, color=COLORS[n % len(COLORS)], label=name)
print xs, delta
n += 1
print "Max delta %d in %s" % (max_val, max_key)
print "Buffer size: %d KB" % (max_val * 4)
plt.legend()
plt.show()
if __name__ == '__main__':
app.run(main)
|
<commit_before><commit_msg>Add script to visualize pipestats.
Bug: 78765090
Change-Id: If95a59a7020a835968aa9fdf1660b97b4ed6bb4e<commit_after>
|
#!/usr/bin/python
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
from matplotlib import pyplot as plt
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('window', 100, 'Size of rolling average window')
COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'indigo']
def max_default(seq, default):
try:
return np.max(seq)
except ValueError:
return default
def main(argv):
max_val = 0
max_key = ""
n = 0
for fn in argv[1:]:
name = os.path.basename(fn)
xs = np.loadtxt(fn, dtype=np.int)
ys = np.arange(len(xs))
delta = ys - np.array([max_default(ys[xs < x - FLAGS.window], np.NaN)
for x in xs])
max_delta = np.nanmax(delta)
if max_delta > max_val:
max_val = max_delta
max_key = name
plt.plot(xs, delta, color=COLORS[n % len(COLORS)], label=name)
print xs, delta
n += 1
print "Max delta %d in %s" % (max_val, max_key)
print "Buffer size: %d KB" % (max_val * 4)
plt.legend()
plt.show()
if __name__ == '__main__':
app.run(main)
|
Add script to visualize pipestats.
Bug: 78765090
Change-Id: If95a59a7020a835968aa9fdf1660b97b4ed6bb4e#!/usr/bin/python
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
from matplotlib import pyplot as plt
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('window', 100, 'Size of rolling average window')
COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'indigo']
def max_default(seq, default):
try:
return np.max(seq)
except ValueError:
return default
def main(argv):
max_val = 0
max_key = ""
n = 0
for fn in argv[1:]:
name = os.path.basename(fn)
xs = np.loadtxt(fn, dtype=np.int)
ys = np.arange(len(xs))
delta = ys - np.array([max_default(ys[xs < x - FLAGS.window], np.NaN)
for x in xs])
max_delta = np.nanmax(delta)
if max_delta > max_val:
max_val = max_delta
max_key = name
plt.plot(xs, delta, color=COLORS[n % len(COLORS)], label=name)
print xs, delta
n += 1
print "Max delta %d in %s" % (max_val, max_key)
print "Buffer size: %d KB" % (max_val * 4)
plt.legend()
plt.show()
if __name__ == '__main__':
app.run(main)
|
<commit_before><commit_msg>Add script to visualize pipestats.
Bug: 78765090
Change-Id: If95a59a7020a835968aa9fdf1660b97b4ed6bb4e<commit_after>#!/usr/bin/python
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
from matplotlib import pyplot as plt
from absl import app
from absl import flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('window', 100, 'Size of rolling average window')
COLORS = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'indigo']
def max_default(seq, default):
try:
return np.max(seq)
except ValueError:
return default
def main(argv):
max_val = 0
max_key = ""
n = 0
for fn in argv[1:]:
name = os.path.basename(fn)
xs = np.loadtxt(fn, dtype=np.int)
ys = np.arange(len(xs))
delta = ys - np.array([max_default(ys[xs < x - FLAGS.window], np.NaN)
for x in xs])
max_delta = np.nanmax(delta)
if max_delta > max_val:
max_val = max_delta
max_key = name
plt.plot(xs, delta, color=COLORS[n % len(COLORS)], label=name)
print xs, delta
n += 1
print "Max delta %d in %s" % (max_val, max_key)
print "Buffer size: %d KB" % (max_val * 4)
plt.legend()
plt.show()
if __name__ == '__main__':
app.run(main)
|
|
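The plotting script above computes, for each sample, how far back the stream last sat more than the window below the current value; the tiny NumPy illustration below reproduces that calculation on a hand-made array. It is Python 3 and independent of the script, which itself uses Python 2 print statements.

# Small NumPy illustration of the windowed delta computed in the script above.
import numpy as np

window = 3
xs = np.array([0, 2, 4, 9, 10, 11])   # made-up monotonically increasing samples
ys = np.arange(len(xs))

def max_default(seq, default):
    try:
        return np.max(seq)
    except ValueError:          # empty selection: nothing was ever that far behind
        return default

delta = ys - np.array([max_default(ys[xs < x - window], np.nan) for x in xs])
print(delta)   # nan entries mean no earlier sample satisfied the window condition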
dda447ce46e5140a8cf39d2615424a9c5655f798
|
senlin/tests/tempest/api/profiles/test_profile_list.py
|
senlin/tests/tempest/api/profiles/test_profile_list.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants
class TestProfileShow(base.BaseSenlinTest):
@classmethod
def resource_setup(cls):
super(TestProfileShow, cls).resource_setup()
# Create profile
cls.profile = cls.create_profile(constants.spec_nova_server)
@classmethod
def resource_cleanup(cls):
# Delete profile
cls.client.delete_obj('profiles', cls.profile['id'])
@decorators.idempotent_id('329d3026-12f7-4369-845b-05914e2a8678')
def test_list_profile(self):
res = self.client.list_objs('profiles')
# Verify resp of profile list API
self.assertEqual(200, res['status'])
self.assertIsNone(res['location'])
self.assertIsNotNone(res['body'])
profiles = res['body']
ids = []
for profile in profiles:
for key in ['created_at', 'domain', 'id', 'metadata', 'name',
'project', 'spec', 'type', 'updated_at', 'user']:
self.assertIn(key, profile)
ids.append(profile['id'])
self.assertIn(self.profile['id'], ids)
|
Add API test for profile list
|
Add API test for profile list
Add API test for profile list
Change-Id: Ia317e86f253d84e3e00c68c5e9db6afbed6cf604
|
Python
|
apache-2.0
|
openstack/senlin,openstack/senlin,stackforge/senlin,stackforge/senlin,openstack/senlin
|
Add API test for profile list
Add API test for profile list
Change-Id: Ia317e86f253d84e3e00c68c5e9db6afbed6cf604
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants
class TestProfileShow(base.BaseSenlinTest):
@classmethod
def resource_setup(cls):
super(TestProfileShow, cls).resource_setup()
# Create profile
cls.profile = cls.create_profile(constants.spec_nova_server)
@classmethod
def resource_cleanup(cls):
# Delete profile
cls.client.delete_obj('profiles', cls.profile['id'])
@decorators.idempotent_id('329d3026-12f7-4369-845b-05914e2a8678')
def test_list_profile(self):
res = self.client.list_objs('profiles')
# Verify resp of profile list API
self.assertEqual(200, res['status'])
self.assertIsNone(res['location'])
self.assertIsNotNone(res['body'])
profiles = res['body']
ids = []
for profile in profiles:
for key in ['created_at', 'domain', 'id', 'metadata', 'name',
'project', 'spec', 'type', 'updated_at', 'user']:
self.assertIn(key, profile)
ids.append(profile['id'])
self.assertIn(self.profile['id'], ids)
|
<commit_before><commit_msg>Add API test for profile list
Add API test for profile list
Change-Id: Ia317e86f253d84e3e00c68c5e9db6afbed6cf604<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants
class TestProfileShow(base.BaseSenlinTest):
@classmethod
def resource_setup(cls):
super(TestProfileShow, cls).resource_setup()
# Create profile
cls.profile = cls.create_profile(constants.spec_nova_server)
@classmethod
def resource_cleanup(cls):
# Delete profile
cls.client.delete_obj('profiles', cls.profile['id'])
@decorators.idempotent_id('329d3026-12f7-4369-845b-05914e2a8678')
def test_list_profile(self):
res = self.client.list_objs('profiles')
# Verify resp of profile list API
self.assertEqual(200, res['status'])
self.assertIsNone(res['location'])
self.assertIsNotNone(res['body'])
profiles = res['body']
ids = []
for profile in profiles:
for key in ['created_at', 'domain', 'id', 'metadata', 'name',
'project', 'spec', 'type', 'updated_at', 'user']:
self.assertIn(key, profile)
ids.append(profile['id'])
self.assertIn(self.profile['id'], ids)
|
Add API test for profile list
Add API test for profile list
Change-Id: Ia317e86f253d84e3e00c68c5e9db6afbed6cf604# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants
class TestProfileShow(base.BaseSenlinTest):
@classmethod
def resource_setup(cls):
super(TestProfileShow, cls).resource_setup()
# Create profile
cls.profile = cls.create_profile(constants.spec_nova_server)
@classmethod
def resource_cleanup(cls):
# Delete profile
cls.client.delete_obj('profiles', cls.profile['id'])
@decorators.idempotent_id('329d3026-12f7-4369-845b-05914e2a8678')
def test_list_profile(self):
res = self.client.list_objs('profiles')
# Verify resp of profile list API
self.assertEqual(200, res['status'])
self.assertIsNone(res['location'])
self.assertIsNotNone(res['body'])
profiles = res['body']
ids = []
for profile in profiles:
for key in ['created_at', 'domain', 'id', 'metadata', 'name',
'project', 'spec', 'type', 'updated_at', 'user']:
self.assertIn(key, profile)
ids.append(profile['id'])
self.assertIn(self.profile['id'], ids)
|
<commit_before><commit_msg>Add API test for profile list
Add API test for profile list
Change-Id: Ia317e86f253d84e3e00c68c5e9db6afbed6cf604<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.common import constants
class TestProfileShow(base.BaseSenlinTest):
@classmethod
def resource_setup(cls):
super(TestProfileShow, cls).resource_setup()
# Create profile
cls.profile = cls.create_profile(constants.spec_nova_server)
@classmethod
def resource_cleanup(cls):
# Delete profile
cls.client.delete_obj('profiles', cls.profile['id'])
@decorators.idempotent_id('329d3026-12f7-4369-845b-05914e2a8678')
def test_list_profile(self):
res = self.client.list_objs('profiles')
# Verify resp of profile list API
self.assertEqual(200, res['status'])
self.assertIsNone(res['location'])
self.assertIsNotNone(res['body'])
profiles = res['body']
ids = []
for profile in profiles:
for key in ['created_at', 'domain', 'id', 'metadata', 'name',
'project', 'spec', 'type', 'updated_at', 'user']:
self.assertIn(key, profile)
ids.append(profile['id'])
self.assertIn(self.profile['id'], ids)
|
|
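The list test above mainly checks that every returned profile carries a fixed set of keys; a plain-Python illustration of that check, usable without tempest or a running Senlin service, is sketched below.

# Standalone illustration of the per-profile key check performed in the test above.
REQUIRED_KEYS = ['created_at', 'domain', 'id', 'metadata', 'name',
                 'project', 'spec', 'type', 'updated_at', 'user']

def missing_keys(profile):
    return [key for key in REQUIRED_KEYS if key not in profile]

sample = {key: None for key in REQUIRED_KEYS}
assert missing_keys(sample) == []
assert missing_keys({'id': 'abc'}) == [k for k in REQUIRED_KEYS if k != 'id']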
1f31e28ec1abb40dc25a8ff0c7e872d404483881
|
app/ashlar/migrations/0002_auto_20150421_1724.py
|
app/ashlar/migrations/0002_auto_20150421_1724.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ashlar.models import RecordSchema, Record, ItemSchema
class Migration(migrations.Migration):
dependencies = [
('ashlar', '0001_initial'),
]
create_gin_index_sql = 'CREATE INDEX {index_name} ON {table} USING gin({column})'
drop_gin_index_sql = 'DROP INDEX IF EXISTS {index_name}'
def _get_field_db_column(cls, fieldname):
"""Returns the name of the database column corresponding to a field on a Django Model
:param cls: Subclass of django.db.models.Model
:param fieldname: Name of a field on cls
:returns: String with database column name corresponding to fieldname
"""
# Both of these get_* functions return tuples of information; the
# numbers are just the indexes of the information we want, which is a
# Field instance and the db column name, respectively.
return cls._meta.get_field_by_name(fieldname)[0].get_attname_column()[1]
operations = [
# Records
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_record_data_gin',
table=Record._meta.db_table,
column=_get_field_db_column(Record, 'data')),
drop_gin_index_sql.format(index_name='ashlar_record_data_gin')),
# RecordSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin',
table=RecordSchema._meta.db_table,
column=_get_field_db_column(RecordSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin')),
# ItemSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin',
table=ItemSchema._meta.db_table,
column=_get_field_db_column(ItemSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin'))
]
|
Add GIN index to JSONB fields via migration
|
Add GIN index to JSONB fields via migration
|
Python
|
mit
|
flibbertigibbet/ashlar,flibbertigibbet/ashlar,azavea/ashlar,azavea/ashlar
|
Add GIN index to JSONB fields via migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ashlar.models import RecordSchema, Record, ItemSchema
class Migration(migrations.Migration):
dependencies = [
('ashlar', '0001_initial'),
]
create_gin_index_sql = 'CREATE INDEX {index_name} ON {table} USING gin({column})'
drop_gin_index_sql = 'DROP INDEX IF EXISTS {index_name}'
def _get_field_db_column(cls, fieldname):
"""Returns the name of the database column corresponding to a field on a Django Model
:param cls: Subclass of django.db.models.Model
:param fieldname: Name of a field on cls
:returns: String with database column name corresponding to fieldname
"""
# Both of these get_* functions return tuples of information; the
# numbers are just the indexes of the information we want, which is a
# Field instance and the db column name, respectively.
return cls._meta.get_field_by_name(fieldname)[0].get_attname_column()[1]
operations = [
# Records
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_record_data_gin',
table=Record._meta.db_table,
column=_get_field_db_column(Record, 'data')),
drop_gin_index_sql.format(index_name='ashlar_record_data_gin')),
# RecordSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin',
table=RecordSchema._meta.db_table,
column=_get_field_db_column(RecordSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin')),
# ItemSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin',
table=ItemSchema._meta.db_table,
column=_get_field_db_column(ItemSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin'))
]
|
<commit_before><commit_msg>Add GIN index to JSONB fields via migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ashlar.models import RecordSchema, Record, ItemSchema
class Migration(migrations.Migration):
dependencies = [
('ashlar', '0001_initial'),
]
create_gin_index_sql = 'CREATE INDEX {index_name} ON {table} USING gin({column})'
drop_gin_index_sql = 'DROP INDEX IF EXISTS {index_name}'
def _get_field_db_column(cls, fieldname):
"""Returns the name of the database column corresponding to a field on a Django Model
        :param cls: Subclass of django.db.models.Model
:param fieldname: Name of a field on cls
:returns: String with database column name corresponding to fieldname
"""
# Both of these get_* functions return tuples of information; the
# numbers are just the indexes of the information we want, which is a
# Field instance and the db column name, respectively.
return cls._meta.get_field_by_name(fieldname)[0].get_attname_column()[1]
operations = [
# Records
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_record_data_gin',
table=Record._meta.db_table,
column=_get_field_db_column(Record, 'data')),
drop_gin_index_sql.format(index_name='ashlar_record_data_gin')),
# RecordSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin',
table=RecordSchema._meta.db_table,
column=_get_field_db_column(RecordSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin')),
# ItemSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin',
table=ItemSchema._meta.db_table,
column=_get_field_db_column(ItemSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin'))
]
|
Add GIN index to JSONB fields via migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ashlar.models import RecordSchema, Record, ItemSchema
class Migration(migrations.Migration):
dependencies = [
('ashlar', '0001_initial'),
]
create_gin_index_sql = 'CREATE INDEX {index_name} ON {table} USING gin({column})'
drop_gin_index_sql = 'DROP INDEX IF EXISTS {index_name}'
def _get_field_db_column(cls, fieldname):
"""Returns the name of the database column corresponding to a field on a Django Model
        :param cls: Subclass of django.db.models.Model
:param fieldname: Name of a field on cls
:returns: String with database column name corresponding to fieldname
"""
# Both of these get_* functions return tuples of information; the
# numbers are just the indexes of the information we want, which is a
# Field instance and the db column name, respectively.
return cls._meta.get_field_by_name(fieldname)[0].get_attname_column()[1]
operations = [
# Records
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_record_data_gin',
table=Record._meta.db_table,
column=_get_field_db_column(Record, 'data')),
drop_gin_index_sql.format(index_name='ashlar_record_data_gin')),
# RecordSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin',
table=RecordSchema._meta.db_table,
column=_get_field_db_column(RecordSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin')),
# ItemSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin',
table=ItemSchema._meta.db_table,
column=_get_field_db_column(ItemSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin'))
]
|
<commit_before><commit_msg>Add GIN index to JSONB fields via migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from ashlar.models import RecordSchema, Record, ItemSchema
class Migration(migrations.Migration):
dependencies = [
('ashlar', '0001_initial'),
]
create_gin_index_sql = 'CREATE INDEX {index_name} ON {table} USING gin({column})'
drop_gin_index_sql = 'DROP INDEX IF EXISTS {index_name}'
def _get_field_db_column(cls, fieldname):
"""Returns the name of the database column corresponding to a field on a Django Model
        :param cls: Subclass of django.db.models.Model
:param fieldname: Name of a field on cls
:returns: String with database column name corresponding to fieldname
"""
# Both of these get_* functions return tuples of information; the
# numbers are just the indexes of the information we want, which is a
# Field instance and the db column name, respectively.
return cls._meta.get_field_by_name(fieldname)[0].get_attname_column()[1]
operations = [
# Records
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_record_data_gin',
table=Record._meta.db_table,
column=_get_field_db_column(Record, 'data')),
drop_gin_index_sql.format(index_name='ashlar_record_data_gin')),
# RecordSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin',
table=RecordSchema._meta.db_table,
column=_get_field_db_column(RecordSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_recordschema_schema_gin')),
# ItemSchema
migrations.RunSQL(create_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin',
table=ItemSchema._meta.db_table,
column=_get_field_db_column(ItemSchema, 'schema')),
drop_gin_index_sql.format(index_name='ashlar_itemschema_schema_gin'))
]
|
|
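Note on the GIN-index migration above: each RunSQL pair renders into one CREATE INDEX / DROP INDEX statement per JSONB column, with the table and column names resolved from the model metadata at migration time. A minimal sketch of the forward and backward SQL for the Record table, assuming Django's default table name ashlar_record (the real names come from _meta, so they may differ):
# Hypothetical rendering of the first RunSQL pair above.
create_sql = 'CREATE INDEX {index_name} ON {table} USING gin({column})'.format(
    index_name='ashlar_record_data_gin',
    table='ashlar_record',   # assumed default table name for ashlar.models.Record
    column='data',           # assumed db column for the JSONB 'data' field
)
drop_sql = 'DROP INDEX IF EXISTS {index_name}'.format(index_name='ashlar_record_data_gin')
print(create_sql)  # CREATE INDEX ashlar_record_data_gin ON ashlar_record USING gin(data)
print(drop_sql)    # DROP INDEX IF EXISTS ashlar_record_data_gin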
211c1e11e73e9ca8dfd485098f18a68b52869308
|
CalendarApi.py
|
CalendarApi.py
|
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file,client,tools
try:
import argparse
flags=argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags=None
def create_remainder(name,date):
#REFERENCE:https://developers.google.com/google-apps/calendar/v3/reference/events/insert#examples
print name+" "+date
SCOPES='https://www.googleapis.com/auth/calendar'
store=file.Storage('storage.json')
creds=store.get()
if not creds or creds.invalid:
flow=client.flow_from_clientsecrets('client_secret.json',SCOPES)
creds=tools.run_flow(flow,store,flags) \
if flags else tools.run(flow,store)
CAL=build('calendar','v3',http=creds.authorize(Http()))
#Events in JSON format
ID="fokql1u8cutplmdfeu6ql4hqs8@group.calendar.google.com"
EVENT={
'summary':name,
'start':{'dateTime':date+'T7:00:00+05:30'},
'end': {'dateTime':date+'T8:00:00+05:30'}
}
response = CAL.events().insert(calendarId=ID,sendNotifications=True,body=EVENT).execute()
if response['status']=='confirmed':
return 'Success'
else:
        return 'Error Occurred'
#print create_remainder("Test0","2016-09-07")
|
Create event in google calendar
|
Create event in google calendar
|
Python
|
mit
|
Ajithkumarsekar/KCT-Academic-calendar-Converter
|
Create event in google calendar
|
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file,client,tools
try:
import argparse
flags=argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags=None
def create_remainder(name,date):
#REFERENCE:https://developers.google.com/google-apps/calendar/v3/reference/events/insert#examples
print name+" "+date
SCOPES='https://www.googleapis.com/auth/calendar'
store=file.Storage('storage.json')
creds=store.get()
if not creds or creds.invalid:
flow=client.flow_from_clientsecrets('client_secret.json',SCOPES)
creds=tools.run_flow(flow,store,flags) \
if flags else tools.run(flow,store)
CAL=build('calendar','v3',http=creds.authorize(Http()))
#Events in JSON format
ID="fokql1u8cutplmdfeu6ql4hqs8@group.calendar.google.com"
EVENT={
'summary':name,
'start':{'dateTime':date+'T7:00:00+05:30'},
'end': {'dateTime':date+'T8:00:00+05:30'}
}
response = CAL.events().insert(calendarId=ID,sendNotifications=True,body=EVENT).execute()
if response['status']=='confirmed':
return 'Success'
else:
        return 'Error Occurred'
#print create_remainder("Test0","2016-09-07")
|
<commit_before><commit_msg>Create event in google calendar<commit_after>
|
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file,client,tools
try:
import argparse
flags=argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags=None
def create_remainder(name,date):
#REFERENCE:https://developers.google.com/google-apps/calendar/v3/reference/events/insert#examples
print name+" "+date
SCOPES='https://www.googleapis.com/auth/calendar'
store=file.Storage('storage.json')
creds=store.get()
if not creds or creds.invalid:
flow=client.flow_from_clientsecrets('client_secret.json',SCOPES)
creds=tools.run_flow(flow,store,flags) \
if flags else tools.run(flow,store)
CAL=build('calendar','v3',http=creds.authorize(Http()))
#Events in JSON format
ID="fokql1u8cutplmdfeu6ql4hqs8@group.calendar.google.com"
EVENT={
'summary':name,
'start':{'dateTime':date+'T7:00:00+05:30'},
'end': {'dateTime':date+'T8:00:00+05:30'}
}
response = CAL.events().insert(calendarId=ID,sendNotifications=True,body=EVENT).execute()
if response['status']=='confirmed':
return 'Success'
else:
        return 'Error Occurred'
#print create_remainder("Test0","2016-09-07")
|
Create event in google calendarfrom apiclient.discovery import build
from httplib2 import Http
from oauth2client import file,client,tools
try:
import argparse
flags=argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags=None
def create_remainder(name,date):
#REFERENCE:https://developers.google.com/google-apps/calendar/v3/reference/events/insert#examples
print name+" "+date
SCOPES='https://www.googleapis.com/auth/calendar'
store=file.Storage('storage.json')
creds=store.get()
if not creds or creds.invalid:
flow=client.flow_from_clientsecrets('client_secret.json',SCOPES)
creds=tools.run_flow(flow,store,flags) \
if flags else tools.run(flow,store)
CAL=build('calendar','v3',http=creds.authorize(Http()))
#Events in JSON format
ID="fokql1u8cutplmdfeu6ql4hqs8@group.calendar.google.com"
EVENT={
'summary':name,
'start':{'dateTime':date+'T7:00:00+05:30'},
'end': {'dateTime':date+'T8:00:00+05:30'}
}
response = CAL.events().insert(calendarId=ID,sendNotifications=True,body=EVENT).execute()
if response['status']=='confirmed':
return 'Success'
else:
        return 'Error Occurred'
#print create_remainder("Test0","2016-09-07")
|
<commit_before><commit_msg>Create event in google calendar<commit_after>from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file,client,tools
try:
import argparse
flags=argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags=None
def create_remainder(name,date):
#REFERENCE:https://developers.google.com/google-apps/calendar/v3/reference/events/insert#examples
print name+" "+date
SCOPES='https://www.googleapis.com/auth/calendar'
store=file.Storage('storage.json')
creds=store.get()
if not creds or creds.invalid:
flow=client.flow_from_clientsecrets('client_secret.json',SCOPES)
creds=tools.run_flow(flow,store,flags) \
if flags else tools.run(flow,store)
CAL=build('calendar','v3',http=creds.authorize(Http()))
#Events in JSON format
ID="fokql1u8cutplmdfeu6ql4hqs8@group.calendar.google.com"
EVENT={
'summary':name,
'start':{'dateTime':date+'T7:00:00+05:30'},
'end': {'dateTime':date+'T8:00:00+05:30'}
}
response = CAL.events().insert(calendarId=ID,sendNotifications=True,body=EVENT).execute()
if response['status']=='confirmed':
return 'Success'
else:
        return 'Error Occurred'
#print create_remainder("Test0","2016-09-07")
|
|
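Note on the calendar script above: create_remainder() builds the event body by concatenating the date argument with fixed local times, so the caller must pass an ISO-style YYYY-MM-DD string. For a hypothetical call create_remainder('Test0', '2016-09-07'), the body sent to the API would be:
# Event body produced for create_remainder('Test0', '2016-09-07') (illustrative only).
EVENT = {
    'summary': 'Test0',
    'start': {'dateTime': '2016-09-07T7:00:00+05:30'},
    'end':   {'dateTime': '2016-09-07T8:00:00+05:30'},
}
Since the Calendar API expects RFC 3339 dateTime values, which use zero-padded hours, 'T07:00:00+05:30' would be the safer form.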
48bd26f2ec9cfb512aad55fb3f11ed7b7713b0dd
|
self_convert_tf_to_csv.py
|
self_convert_tf_to_csv.py
|
from subprocess import check_output
import tensorflow as tf
import csv
VIDEO_LEVEL_DATA_FODLER = "/Users/Sophie/Documents/youtube-8m-data/train/"
CSV_FILE_PATH = 'train.csv'
with open(CSV_FILE_PATH, 'w') as f:
fieldnames = ['video_id', 'mean_rgb', 'mean_audio', 'labels']
csv_writer = csv.DictWriter(f, fieldnames=fieldnames)
csv_writer.writeheader()
train_tf_files = check_output(["ls", VIDEO_LEVEL_DATA_FODLER]).decode("UTF-8").split("\n")
for file_name in train_tf_files:
if file_name.endswith("tfrecord"):
print("file_name: {}".format(file_name))
for example in tf.python_io.tf_record_iterator(VIDEO_LEVEL_DATA_FODLER + file_name):
tf_example_feature = tf.train.Example.FromString(example).features.feature
video_id = tf_example_feature['video_id'].bytes_list.value[0].decode('UTF-8')
labels = tf_example_feature['labels'].int64_list.value
mean_rgb = tf_example_feature['mean_rgb'].float_list.value
mean_audio = tf_example_feature['mean_audio'].float_list.value
csv_writer.writerow({
'video_id': video_id, 'mean_rgb': ':'.join([str(e) for e in mean_rgb]),
'mean_audio': ':'.join([str(e) for e in mean_audio]), 'labels': ':'.join([str(e) for e in labels])
})
f.close()
|
Convert tfrecords to csv to be used in Spark.
|
Convert tfrecords to csv to be used in Spark.
|
Python
|
apache-2.0
|
lidalei/youtube-8m
|
Convert tfrecords to csv to be used in Spark.
|
from subprocess import check_output
import tensorflow as tf
import csv
VIDEO_LEVEL_DATA_FODLER = "/Users/Sophie/Documents/youtube-8m-data/train/"
CSV_FILE_PATH = 'train.csv'
with open(CSV_FILE_PATH, 'w') as f:
fieldnames = ['video_id', 'mean_rgb', 'mean_audio', 'labels']
csv_writer = csv.DictWriter(f, fieldnames=fieldnames)
csv_writer.writeheader()
train_tf_files = check_output(["ls", VIDEO_LEVEL_DATA_FODLER]).decode("UTF-8").split("\n")
for file_name in train_tf_files:
if file_name.endswith("tfrecord"):
print("file_name: {}".format(file_name))
for example in tf.python_io.tf_record_iterator(VIDEO_LEVEL_DATA_FODLER + file_name):
tf_example_feature = tf.train.Example.FromString(example).features.feature
video_id = tf_example_feature['video_id'].bytes_list.value[0].decode('UTF-8')
labels = tf_example_feature['labels'].int64_list.value
mean_rgb = tf_example_feature['mean_rgb'].float_list.value
mean_audio = tf_example_feature['mean_audio'].float_list.value
csv_writer.writerow({
'video_id': video_id, 'mean_rgb': ':'.join([str(e) for e in mean_rgb]),
'mean_audio': ':'.join([str(e) for e in mean_audio]), 'labels': ':'.join([str(e) for e in labels])
})
f.close()
|
<commit_before><commit_msg>Convert tfrecords to csv to be used in Spark.<commit_after>
|
from subprocess import check_output
import tensorflow as tf
import csv
VIDEO_LEVEL_DATA_FODLER = "/Users/Sophie/Documents/youtube-8m-data/train/"
CSV_FILE_PATH = 'train.csv'
with open(CSV_FILE_PATH, 'w') as f:
fieldnames = ['video_id', 'mean_rgb', 'mean_audio', 'labels']
csv_writer = csv.DictWriter(f, fieldnames=fieldnames)
csv_writer.writeheader()
train_tf_files = check_output(["ls", VIDEO_LEVEL_DATA_FODLER]).decode("UTF-8").split("\n")
for file_name in train_tf_files:
if file_name.endswith("tfrecord"):
print("file_name: {}".format(file_name))
for example in tf.python_io.tf_record_iterator(VIDEO_LEVEL_DATA_FODLER + file_name):
tf_example_feature = tf.train.Example.FromString(example).features.feature
video_id = tf_example_feature['video_id'].bytes_list.value[0].decode('UTF-8')
labels = tf_example_feature['labels'].int64_list.value
mean_rgb = tf_example_feature['mean_rgb'].float_list.value
mean_audio = tf_example_feature['mean_audio'].float_list.value
csv_writer.writerow({
'video_id': video_id, 'mean_rgb': ':'.join([str(e) for e in mean_rgb]),
'mean_audio': ':'.join([str(e) for e in mean_audio]), 'labels': ':'.join([str(e) for e in labels])
})
f.close()
|
Convert tfrecords to csv to be used in Spark.from subprocess import check_output
import tensorflow as tf
import csv
VIDEO_LEVEL_DATA_FODLER = "/Users/Sophie/Documents/youtube-8m-data/train/"
CSV_FILE_PATH = 'train.csv'
with open(CSV_FILE_PATH, 'w') as f:
fieldnames = ['video_id', 'mean_rgb', 'mean_audio', 'labels']
csv_writer = csv.DictWriter(f, fieldnames=fieldnames)
csv_writer.writeheader()
train_tf_files = check_output(["ls", VIDEO_LEVEL_DATA_FODLER]).decode("UTF-8").split("\n")
for file_name in train_tf_files:
if file_name.endswith("tfrecord"):
print("file_name: {}".format(file_name))
for example in tf.python_io.tf_record_iterator(VIDEO_LEVEL_DATA_FODLER + file_name):
tf_example_feature = tf.train.Example.FromString(example).features.feature
video_id = tf_example_feature['video_id'].bytes_list.value[0].decode('UTF-8')
labels = tf_example_feature['labels'].int64_list.value
mean_rgb = tf_example_feature['mean_rgb'].float_list.value
mean_audio = tf_example_feature['mean_audio'].float_list.value
csv_writer.writerow({
'video_id': video_id, 'mean_rgb': ':'.join([str(e) for e in mean_rgb]),
'mean_audio': ':'.join([str(e) for e in mean_audio]), 'labels': ':'.join([str(e) for e in labels])
})
f.close()
|
<commit_before><commit_msg>Convert tfrecords to csv to be used in Spark.<commit_after>from subprocess import check_output
import tensorflow as tf
import csv
VIDEO_LEVEL_DATA_FODLER = "/Users/Sophie/Documents/youtube-8m-data/train/"
CSV_FILE_PATH = 'train.csv'
with open(CSV_FILE_PATH, 'w') as f:
fieldnames = ['video_id', 'mean_rgb', 'mean_audio', 'labels']
csv_writer = csv.DictWriter(f, fieldnames=fieldnames)
csv_writer.writeheader()
train_tf_files = check_output(["ls", VIDEO_LEVEL_DATA_FODLER]).decode("UTF-8").split("\n")
for file_name in train_tf_files:
if file_name.endswith("tfrecord"):
print("file_name: {}".format(file_name))
for example in tf.python_io.tf_record_iterator(VIDEO_LEVEL_DATA_FODLER + file_name):
tf_example_feature = tf.train.Example.FromString(example).features.feature
video_id = tf_example_feature['video_id'].bytes_list.value[0].decode('UTF-8')
labels = tf_example_feature['labels'].int64_list.value
mean_rgb = tf_example_feature['mean_rgb'].float_list.value
mean_audio = tf_example_feature['mean_audio'].float_list.value
csv_writer.writerow({
'video_id': video_id, 'mean_rgb': ':'.join([str(e) for e in mean_rgb]),
'mean_audio': ':'.join([str(e) for e in mean_audio]), 'labels': ':'.join([str(e) for e in labels])
})
f.close()
|
|
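Note on the conversion script above: each CSV row stores the video id plus the mean_rgb, mean_audio and label vectors as single ':'-joined strings, which Spark (or plain Python) has to split again on read. A minimal read-back sketch, assuming the train.csv produced by the script exists:
import csv

# Read the first row back and unpack the ':'-joined columns written above.
with open('train.csv') as f:
    for row in csv.DictReader(f):
        mean_rgb = [float(x) for x in row['mean_rgb'].split(':')]
        mean_audio = [float(x) for x in row['mean_audio'].split(':')]
        labels = [int(x) for x in row['labels'].split(':')]
        print(row['video_id'], len(mean_rgb), len(mean_audio), labels)
        break  # only inspect the first row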
26574a6de1966bc0a0bb5d3625469c43ad83e1ff
|
redis-mass.py
|
redis-mass.py
|
#!/usr/bin/env python
"""
redis-mass.py
~~~~~~~~~~~~~
Prepares a newline-separated file of Redis commands for mass insertion.
:copyright: (c) 2015 by Tim Simmons.
:license: BSD, see LICENSE for more details.
"""
import sys
def proto(line):
result = "*%s\r\n$%s\r\n%s\r\n" % (str(len(line)), str(len(line[0])), line[0])
for arg in line[1:]:
result += "$%s\r\n%s\r\n" % (str(len(arg)), arg)
return result
if __name__ == "__main__":
try:
filename = sys.argv[1]
f = open(filename, 'r')
except IndexError:
f = sys.stdin.readlines()
for line in f:
print proto(line.rstrip().split(' ')),
|
Add Redis commands to Redis Mass Insertion script
|
Add Redis commands to Redis Mass Insertion script
|
Python
|
isc
|
Squab/redis-mass-insertion
|
Add Redis commands to Redis Mass Insertion script
|
#!/usr/bin/env python
"""
redis-mass.py
~~~~~~~~~~~~~
Prepares a newline-separated file of Redis commands for mass insertion.
:copyright: (c) 2015 by Tim Simmons.
:license: BSD, see LICENSE for more details.
"""
import sys
def proto(line):
result = "*%s\r\n$%s\r\n%s\r\n" % (str(len(line)), str(len(line[0])), line[0])
for arg in line[1:]:
result += "$%s\r\n%s\r\n" % (str(len(arg)), arg)
return result
if __name__ == "__main__":
try:
filename = sys.argv[1]
f = open(filename, 'r')
except IndexError:
f = sys.stdin.readlines()
for line in f:
print proto(line.rstrip().split(' ')),
|
<commit_before><commit_msg>Add Redis commands to Redis Mass Insertion script<commit_after>
|
#!/usr/bin/env python
"""
redis-mass.py
~~~~~~~~~~~~~
Prepares a newline-separated file of Redis commands for mass insertion.
:copyright: (c) 2015 by Tim Simmons.
:license: BSD, see LICENSE for more details.
"""
import sys
def proto(line):
result = "*%s\r\n$%s\r\n%s\r\n" % (str(len(line)), str(len(line[0])), line[0])
for arg in line[1:]:
result += "$%s\r\n%s\r\n" % (str(len(arg)), arg)
return result
if __name__ == "__main__":
try:
filename = sys.argv[1]
f = open(filename, 'r')
except IndexError:
f = sys.stdin.readlines()
for line in f:
print proto(line.rstrip().split(' ')),
|
Add Redis commands to Redis Mass Insertion script#!/usr/bin/env python
"""
redis-mass.py
~~~~~~~~~~~~~
Prepares a newline-separated file of Redis commands for mass insertion.
:copyright: (c) 2015 by Tim Simmons.
:license: BSD, see LICENSE for more details.
"""
import sys
def proto(line):
result = "*%s\r\n$%s\r\n%s\r\n" % (str(len(line)), str(len(line[0])), line[0])
for arg in line[1:]:
result += "$%s\r\n%s\r\n" % (str(len(arg)), arg)
return result
if __name__ == "__main__":
try:
filename = sys.argv[1]
f = open(filename, 'r')
except IndexError:
f = sys.stdin.readlines()
for line in f:
print proto(line.rstrip().split(' ')),
|
<commit_before><commit_msg>Add Redis commands to Redis Mass Insertion script<commit_after>#!/usr/bin/env python
"""
redis-mass.py
~~~~~~~~~~~~~
Prepares a newline-separated file of Redis commands for mass insertion.
:copyright: (c) 2015 by Tim Simmons.
:license: BSD, see LICENSE for more details.
"""
import sys
def proto(line):
result = "*%s\r\n$%s\r\n%s\r\n" % (str(len(line)), str(len(line[0])), line[0])
for arg in line[1:]:
result += "$%s\r\n%s\r\n" % (str(len(arg)), arg)
return result
if __name__ == "__main__":
try:
filename = sys.argv[1]
f = open(filename, 'r')
except IndexError:
f = sys.stdin.readlines()
for line in f:
print proto(line.rstrip().split(' ')),
|
|
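Note on redis-mass.py above: proto() emits the Redis unified request protocol used for mass insertion, i.e. a *<argc> line followed by a $<length>/value pair per argument, all CRLF-terminated. For example:
# proto('SET greeting hello'.split(' ')) returns
# '*3\r\n$3\r\nSET\r\n$8\r\ngreeting\r\n$5\r\nhello\r\n'
print proto('SET greeting hello'.split(' ')),
The usual way to consume this output is to pipe it into redis-cli --pipe (mass-insertion mode), e.g. python redis-mass.py commands.txt | redis-cli --pipe, although the exact invocation is not part of this commit.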
fdab70f014e1afc3c387265689256bfce84437de
|
tests/functional_tests/test_errors/test_error_in_workflow.py
|
tests/functional_tests/test_errors/test_error_in_workflow.py
|
# -*- coding: utf-8 -*-
from tests.functional_tests import FunctionalTestBase, isolate
class TestErrorInWorkflow(FunctionalTestBase):
@isolate
def test_missing_primary_resource(self):
""" Should fail if a primary resource is missing"""
project = """file://B <- file://A
echo A produces B
echo B > B
"""
self.write_tuttlefile(project)
rcode, output = self.run_tuttle()
assert rcode == 2
assert output.find("Missing") >= 0, output
|
Test for missing primary input in the workflow
|
Test for missing primary input in the workflow
|
Python
|
mit
|
lexman/tuttle,lexman/tuttle,lexman/tuttle
|
Test for missing primary input in the workflow
|
# -*- coding: utf-8 -*-
from tests.functional_tests import FunctionalTestBase, isolate
class TestErrorInWorkflow(FunctionalTestBase):
@isolate
def test_missing_primary_resource(self):
""" Should fail if a primary resource is missing"""
project = """file://B <- file://A
echo A produces B
echo B > B
"""
self.write_tuttlefile(project)
rcode, output = self.run_tuttle()
assert rcode == 2
assert output.find("Missing") >= 0, output
|
<commit_before><commit_msg>Test for missing primary input in the workflow<commit_after>
|
# -*- coding: utf-8 -*-
from tests.functional_tests import FunctionalTestBase, isolate
class TestErrorInWorkflow(FunctionalTestBase):
@isolate
def test_missing_primary_resource(self):
""" Should fail if a primary resource is missing"""
project = """file://B <- file://A
echo A produces B
echo B > B
"""
self.write_tuttlefile(project)
rcode, output = self.run_tuttle()
assert rcode == 2
assert output.find("Missing") >= 0, output
|
Test for missing primary input in the workflow# -*- coding: utf-8 -*-
from tests.functional_tests import FunctionalTestBase, isolate
class TestErrorInWorkflow(FunctionalTestBase):
@isolate
def test_missing_primary_resource(self):
""" Should fail if a primary resource is missing"""
project = """file://B <- file://A
echo A produces B
echo B > B
"""
self.write_tuttlefile(project)
rcode, output = self.run_tuttle()
assert rcode == 2
assert output.find("Missing") >= 0, output
|
<commit_before><commit_msg>Test for missing primary input in the workflow<commit_after># -*- coding: utf-8 -*-
from tests.functional_tests import FunctionalTestBase, isolate
class TestErrorInWorkflow(FunctionalTestBase):
@isolate
def test_missing_primary_resource(self):
""" Should fail if a primary resource is missing"""
project = """file://B <- file://A
echo A produces B
echo B > B
"""
self.write_tuttlefile(project)
rcode, output = self.run_tuttle()
assert rcode == 2
assert output.find("Missing") >= 0, output
|
|
8e2c8d669aa547cf8749fa774160f4d05fff121a
|
database/view-table.py
|
database/view-table.py
|
import os
import sqlite3
import sys
try:
    BASE_DIR = sys.argv[1]
except IndexError:
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_location = os.path.join(BASE_DIR, 'opendc.db')
conn = sqlite3.connect(db_location)
c = conn.cursor()
rows = c.execute('SELECT * FROM ' + sys.argv[2])
for row in rows:
print row
|
Add Python script to view a table in the db
|
Add Python script to view a table in the db
|
Python
|
mit
|
tudelft-atlarge/opendc,tudelft-atlarge/opendc
|
Add Python script to view a table in the db
|
import os
import sqlite3
import sys
try:
    BASE_DIR = sys.argv[1]
except IndexError:
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_location = os.path.join(BASE_DIR, 'opendc.db')
conn = sqlite3.connect(db_location)
c = conn.cursor()
rows = c.execute('SELECT * FROM ' + sys.argv[2])
for row in rows:
print row
|
<commit_before><commit_msg>Add Python script to view a table in the db<commit_after>
|
import os
import sqlite3
import sys
try:
    BASE_DIR = sys.argv[1]
except IndexError:
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_location = os.path.join(BASE_DIR, 'opendc.db')
conn = sqlite3.connect(db_location)
c = conn.cursor()
rows = c.execute('SELECT * FROM ' + sys.argv[2])
for row in rows:
print row
|
Add Python script to view a table in the dbimport os
import sqlite3
import sys
try:
    BASE_DIR = sys.argv[1]
except IndexError:
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_location = os.path.join(BASE_DIR, 'opendc.db')
conn = sqlite3.connect(db_location)
c = conn.cursor()
rows = c.execute('SELECT * FROM ' + sys.argv[2])
for row in rows:
print row
|
<commit_before><commit_msg>Add Python script to view a table in the db<commit_after>import os
import sqlite3
import sys
try:
    BASE_DIR = sys.argv[1]
except IndexError:
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_location = os.path.join(BASE_DIR, 'opendc.db')
conn = sqlite3.connect(db_location)
c = conn.cursor()
rows = c.execute('SELECT * FROM ' + sys.argv[2])
for row in rows:
print row
|
|
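Note on view-table.py above: the first argument is an optional directory holding opendc.db (defaulting to the script's own directory) and the second is the table to dump; the table name is string-concatenated into the query because identifiers cannot be passed as bound parameters in sqlite3. An equivalent inline sketch, with a hypothetical directory and table name:
import os
import sqlite3

db_location = os.path.join('/path/to/db-dir', 'opendc.db')  # assumed location
conn = sqlite3.connect(db_location)
for row in conn.cursor().execute('SELECT * FROM datacenters'):  # 'datacenters' is illustrative
    print(row)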
c58366a84f8a26051ad92d2cfbdab3480d01ad65
|
elpiwear/tag_screen.py
|
elpiwear/tag_screen.py
|
import Image
import ImageDraw
import ImageFont
import screen
def draw_rotated_text(image, text, position, angle, font, fill=(255,255,255)):
# Get rendered font width and height.
draw = ImageDraw.Draw(image)
width, height = draw.textsize(text, font=font)
# Create a new image with transparent background to store the text.
textimage = Image.new('RGBA', (width, height), (0,0,0,0))
# Render the text.
textdraw = ImageDraw.Draw(textimage)
textdraw.text((0,0), text, font=font, fill=fill)
# Rotate the text image.
rotated = textimage.rotate(angle, expand=1)
# Paste the text into the image, using it as a mask for transparency.
image.paste(rotated, position, rotated)
class tag_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.img.putdata([(0,0,255)]*(240*320))
self.image = Image.open('me.png')
self.image = self.image.rotate(90).resize((100,100))
self.img.paste(self.image,(0,220,100,320))
self.font = ImageFont.truetype('arial.ttf', 30)
draw_rotated_text(self.img, 'Frederic Jacob', (0, 3), 90, ImageFont.truetype('arial.ttf', 33), fill=(255,0,0))
draw_rotated_text(self.img, 'Software Engineer', (40, 30), 90, ImageFont.truetype('arial.ttf', 20), fill=(255,0,0))
draw_rotated_text(self.img, 'C/C++, Python...', (110, 130), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Linux kernel contributor', (140, 55), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Tw: @IngenieurJacob', (170, 72), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'GH: fjacob21', (200, 167), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
def update(self):
pass
|
Add the tag screen to display my personal information
|
Add the tag screen to display my personal information
|
Python
|
mit
|
fjacob21/pycon2015
|
Add the tag screen to display my personal information
|
import Image
import ImageDraw
import ImageFont
import screen
def draw_rotated_text(image, text, position, angle, font, fill=(255,255,255)):
# Get rendered font width and height.
draw = ImageDraw.Draw(image)
width, height = draw.textsize(text, font=font)
# Create a new image with transparent background to store the text.
textimage = Image.new('RGBA', (width, height), (0,0,0,0))
# Render the text.
textdraw = ImageDraw.Draw(textimage)
textdraw.text((0,0), text, font=font, fill=fill)
# Rotate the text image.
rotated = textimage.rotate(angle, expand=1)
# Paste the text into the image, using it as a mask for transparency.
image.paste(rotated, position, rotated)
class tag_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.img.putdata([(0,0,255)]*(240*320))
self.image = Image.open('me.png')
self.image = self.image.rotate(90).resize((100,100))
self.img.paste(self.image,(0,220,100,320))
self.font = ImageFont.truetype('arial.ttf', 30)
draw_rotated_text(self.img, 'Frederic Jacob', (0, 3), 90, ImageFont.truetype('arial.ttf', 33), fill=(255,0,0))
draw_rotated_text(self.img, 'Software Engineer', (40, 30), 90, ImageFont.truetype('arial.ttf', 20), fill=(255,0,0))
draw_rotated_text(self.img, 'C/C++, Python...', (110, 130), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Linux kernel contributor', (140, 55), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Tw: @IngenieurJacob', (170, 72), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'GH: fjacob21', (200, 167), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
def update(self):
pass
|
<commit_before><commit_msg>Add the tag screen to display my personal information<commit_after>
|
import Image
import ImageDraw
import ImageFont
import screen
def draw_rotated_text(image, text, position, angle, font, fill=(255,255,255)):
# Get rendered font width and height.
draw = ImageDraw.Draw(image)
width, height = draw.textsize(text, font=font)
# Create a new image with transparent background to store the text.
textimage = Image.new('RGBA', (width, height), (0,0,0,0))
# Render the text.
textdraw = ImageDraw.Draw(textimage)
textdraw.text((0,0), text, font=font, fill=fill)
# Rotate the text image.
rotated = textimage.rotate(angle, expand=1)
# Paste the text into the image, using it as a mask for transparency.
image.paste(rotated, position, rotated)
class tag_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.img.putdata([(0,0,255)]*(240*320))
self.image = Image.open('me.png')
self.image = self.image.rotate(90).resize((100,100))
self.img.paste(self.image,(0,220,100,320))
self.font = ImageFont.truetype('arial.ttf', 30)
draw_rotated_text(self.img, 'Frederic Jacob', (0, 3), 90, ImageFont.truetype('arial.ttf', 33), fill=(255,0,0))
draw_rotated_text(self.img, 'Software Engineer', (40, 30), 90, ImageFont.truetype('arial.ttf', 20), fill=(255,0,0))
draw_rotated_text(self.img, 'C/C++, Python...', (110, 130), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Linux kernel contributor', (140, 55), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Tw: @IngenieurJacob', (170, 72), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'GH: fjacob21', (200, 167), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
def update(self):
pass
|
Add the tag screen to display my personal informationimport Image
import ImageDraw
import ImageFont
import screen
def draw_rotated_text(image, text, position, angle, font, fill=(255,255,255)):
# Get rendered font width and height.
draw = ImageDraw.Draw(image)
width, height = draw.textsize(text, font=font)
# Create a new image with transparent background to store the text.
textimage = Image.new('RGBA', (width, height), (0,0,0,0))
# Render the text.
textdraw = ImageDraw.Draw(textimage)
textdraw.text((0,0), text, font=font, fill=fill)
# Rotate the text image.
rotated = textimage.rotate(angle, expand=1)
# Paste the text into the image, using it as a mask for transparency.
image.paste(rotated, position, rotated)
class tag_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.img.putdata([(0,0,255)]*(240*320))
self.image = Image.open('me.png')
self.image = self.image.rotate(90).resize((100,100))
self.img.paste(self.image,(0,220,100,320))
self.font = ImageFont.truetype('arial.ttf', 30)
draw_rotated_text(self.img, 'Frederic Jacob', (0, 3), 90, ImageFont.truetype('arial.ttf', 33), fill=(255,0,0))
draw_rotated_text(self.img, 'Software Engineer', (40, 30), 90, ImageFont.truetype('arial.ttf', 20), fill=(255,0,0))
draw_rotated_text(self.img, 'C/C++, Python...', (110, 130), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Linux kernel contributor', (140, 55), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Tw: @IngenieurJacob', (170, 72), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'GH: fjacob21', (200, 167), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
def update(self):
pass
|
<commit_before><commit_msg>Add the tag screen to display my personal information<commit_after>import Image
import ImageDraw
import ImageFont
import screen
def draw_rotated_text(image, text, position, angle, font, fill=(255,255,255)):
# Get rendered font width and height.
draw = ImageDraw.Draw(image)
width, height = draw.textsize(text, font=font)
# Create a new image with transparent background to store the text.
textimage = Image.new('RGBA', (width, height), (0,0,0,0))
# Render the text.
textdraw = ImageDraw.Draw(textimage)
textdraw.text((0,0), text, font=font, fill=fill)
# Rotate the text image.
rotated = textimage.rotate(angle, expand=1)
# Paste the text into the image, using it as a mask for transparency.
image.paste(rotated, position, rotated)
class tag_screen(screen.screen):
def __init__(self):
screen.screen.__init__(self)
self.img.putdata([(0,0,255)]*(240*320))
self.image = Image.open('me.png')
self.image = self.image.rotate(90).resize((100,100))
self.img.paste(self.image,(0,220,100,320))
self.font = ImageFont.truetype('arial.ttf', 30)
draw_rotated_text(self.img, 'Frederic Jacob', (0, 3), 90, ImageFont.truetype('arial.ttf', 33), fill=(255,0,0))
draw_rotated_text(self.img, 'Software Engineer', (40, 30), 90, ImageFont.truetype('arial.ttf', 20), fill=(255,0,0))
draw_rotated_text(self.img, 'C/C++, Python...', (110, 130), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Linux kernel contributor', (140, 55), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'Tw: @IngenieurJacob', (170, 72), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
draw_rotated_text(self.img, 'GH: fjacob21', (200, 167), 90, ImageFont.truetype('arial.ttf', 25), fill=(255,0,0))
def update(self):
pass
|
|
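Note on tag_screen.py above: draw_rotated_text() renders the string into a transparent RGBA image, rotates that image, and pastes it back using the rotated text as its own alpha mask, which is how the 90-degree labels end up on the portrait display. A minimal standalone sketch of the same helper, assuming the same old-style PIL imports and an arial.ttf next to the script:
import Image
import ImageFont

canvas = Image.new('RGB', (240, 320), (0, 0, 255))
font = ImageFont.truetype('arial.ttf', 25)  # assumed font file, as in the script
draw_rotated_text(canvas, 'Hello PyCon', (10, 10), 90, font, fill=(255, 0, 0))
canvas.save('tag_preview.png')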
5121069e8a8d3047f2f8212bfc7058bf5c8731d3
|
tracpro/contacts/migrations/0012_auto_20170209_2022.py
|
tracpro/contacts/migrations/0012_auto_20170209_2022.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0011_uuid_is_unique_to_org'),
]
operations = [
migrations.AlterField(
model_name='datafield',
name='value_type',
field=models.CharField(max_length=1, verbose_name='value type', choices=[('T', 'Text'), ('N', 'Numeric'), ('D', 'Datetime'), ('S', 'State'), ('I', 'District'), ('N', 'Numeric'), ('W', 'Ward')]),
),
]
|
Add migration for previous commit
|
Add migration for previous commit
|
Python
|
bsd-3-clause
|
rapidpro/tracpro,rapidpro/tracpro,rapidpro/tracpro
|
Add migration for previous commit
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0011_uuid_is_unique_to_org'),
]
operations = [
migrations.AlterField(
model_name='datafield',
name='value_type',
field=models.CharField(max_length=1, verbose_name='value type', choices=[('T', 'Text'), ('N', 'Numeric'), ('D', 'Datetime'), ('S', 'State'), ('I', 'District'), ('N', 'Numeric'), ('W', 'Ward')]),
),
]
|
<commit_before><commit_msg>Add migration for previous commit<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0011_uuid_is_unique_to_org'),
]
operations = [
migrations.AlterField(
model_name='datafield',
name='value_type',
field=models.CharField(max_length=1, verbose_name='value type', choices=[('T', 'Text'), ('N', 'Numeric'), ('D', 'Datetime'), ('S', 'State'), ('I', 'District'), ('N', 'Numeric'), ('W', 'Ward')]),
),
]
|
Add migration for previous commit# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0011_uuid_is_unique_to_org'),
]
operations = [
migrations.AlterField(
model_name='datafield',
name='value_type',
field=models.CharField(max_length=1, verbose_name='value type', choices=[('T', 'Text'), ('N', 'Numeric'), ('D', 'Datetime'), ('S', 'State'), ('I', 'District'), ('N', 'Numeric'), ('W', 'Ward')]),
),
]
|
<commit_before><commit_msg>Add migration for previous commit<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0011_uuid_is_unique_to_org'),
]
operations = [
migrations.AlterField(
model_name='datafield',
name='value_type',
field=models.CharField(max_length=1, verbose_name='value type', choices=[('T', 'Text'), ('N', 'Numeric'), ('D', 'Datetime'), ('S', 'State'), ('I', 'District'), ('N', 'Numeric'), ('W', 'Ward')]),
),
]
|
|
f6cba028766b1b12686c515c8ffa05ffb23992d4
|
opps/views/tests/test_generic_list.py
|
opps/views/tests/test_generic_list.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.utils import timezone
from opps.articles.models import Post, Link
from opps.channels.models import Channel
class TestTemplateName(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create(
username='test@test.com',
email='test@test.com',
password=User.objects.make_random_password(),
)
self.channel = Channel.objects.create(
name='test channel',
slug='test-channel',
user=self.user,
published=True,
date_available=timezone.now(),
)
def test_get_template_name_basic(self):
response = self.client.get(self.channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
def test_get_template_name_subchannel(self):
channel = Channel.objects.create(
name='test subchannel',
slug='test-subchannel',
parent=self.channel,
user=self.user,
published=True,
date_available=timezone.now(),
)
response = self.client.get(channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/test-subchannel/list.html',
'containers/test-channel/list.html',
'containers/list.html'])
"""
def test_querystring_page_is_2(self):
response = self.client.get("{}?page=2".format(self.channel.get_absolute_url()))
self.assertTrue(response)
import pdb; pdb.set_trace()
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
"""
|
Write basic test on generic list views
|
Write basic test on generic list views
|
Python
|
mit
|
williamroot/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,jeanmask/opps
|
Write basic test on generic list views
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.utils import timezone
from opps.articles.models import Post, Link
from opps.channels.models import Channel
class TestTemplateName(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create(
username='test@test.com',
email='test@test.com',
password=User.objects.make_random_password(),
)
self.channel = Channel.objects.create(
name='test channel',
slug='test-channel',
user=self.user,
published=True,
date_available=timezone.now(),
)
def test_get_template_name_basic(self):
response = self.client.get(self.channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
def test_get_template_name_subchannel(self):
channel = Channel.objects.create(
name='test subchannel',
slug='test-subchannel',
parent=self.channel,
user=self.user,
published=True,
date_available=timezone.now(),
)
response = self.client.get(channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/test-subchannel/list.html',
'containers/test-channel/list.html',
'containers/list.html'])
"""
def test_querystring_page_is_2(self):
response = self.client.get("{}?page=2".format(self.channel.get_absolute_url()))
self.assertTrue(response)
import pdb; pdb.set_trace()
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
"""
|
<commit_before><commit_msg>Write basic test on generic list views<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.utils import timezone
from opps.articles.models import Post, Link
from opps.channels.models import Channel
class TestTemplateName(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create(
username='test@test.com',
email='test@test.com',
password=User.objects.make_random_password(),
)
self.channel = Channel.objects.create(
name='test channel',
slug='test-channel',
user=self.user,
published=True,
date_available=timezone.now(),
)
def test_get_template_name_basic(self):
response = self.client.get(self.channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
def test_get_template_name_subchannel(self):
channel = Channel.objects.create(
name='test subchannel',
slug='test-subchannel',
parent=self.channel,
user=self.user,
published=True,
date_available=timezone.now(),
)
response = self.client.get(channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/test-subchannel/list.html',
'containers/test-channel/list.html',
'containers/list.html'])
"""
def test_querystring_page_is_2(self):
response = self.client.get("{}?page=2".format(self.channel.get_absolute_url()))
self.assertTrue(response)
import pdb; pdb.set_trace()
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
"""
|
Write basic test on generic list views#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.utils import timezone
from opps.articles.models import Post, Link
from opps.channels.models import Channel
class TestTemplateName(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create(
username='test@test.com',
email='test@test.com',
password=User.objects.make_random_password(),
)
self.channel = Channel.objects.create(
name='test channel',
slug='test-channel',
user=self.user,
published=True,
date_available=timezone.now(),
)
def test_get_template_name_basic(self):
response = self.client.get(self.channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
def test_get_template_name_subchannel(self):
channel = Channel.objects.create(
name='test subchannel',
slug='test-subchannel',
parent=self.channel,
user=self.user,
published=True,
date_available=timezone.now(),
)
response = self.client.get(channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/test-subchannel/list.html',
'containers/test-channel/list.html',
'containers/list.html'])
"""
def test_querystring_page_is_2(self):
response = self.client.get("{}?page=2".format(self.channel.get_absolute_url()))
self.assertTrue(response)
import pdb; pdb.set_trace()
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
"""
|
<commit_before><commit_msg>Write basic test on generic list views<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.utils import timezone
from opps.articles.models import Post, Link
from opps.channels.models import Channel
class TestTemplateName(TestCase):
def setUp(self):
self.client = Client()
self.user = User.objects.create(
username='test@test.com',
email='test@test.com',
password=User.objects.make_random_password(),
)
self.channel = Channel.objects.create(
name='test channel',
slug='test-channel',
user=self.user,
published=True,
date_available=timezone.now(),
)
def test_get_template_name_basic(self):
response = self.client.get(self.channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
def test_get_template_name_subchannel(self):
channel = Channel.objects.create(
name='test subchannel',
slug='test-subchannel',
parent=self.channel,
user=self.user,
published=True,
date_available=timezone.now(),
)
response = self.client.get(channel.get_absolute_url())
self.assertTrue(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/test-subchannel/list.html',
'containers/test-channel/list.html',
'containers/list.html'])
"""
def test_querystring_page_is_2(self):
response = self.client.get("{}?page=2".format(self.channel.get_absolute_url()))
self.assertTrue(response)
import pdb; pdb.set_trace()
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.template_name,
['containers/test-channel/list.html',
'containers/list.html'])
"""
|
|
06e3e6642cfae2d7b4616609cad1944797586263
|
dev_tools/src/d1_dev/update-requirements-txt.py
|
dev_tools/src/d1_dev/update-requirements-txt.py
|
#!/usr/bin/env python
import shutil
import d1_dev.util
import os
import pip._internal.utils.misc
import re
REQUIREMENTS_FILENAME = 'requirements.txt'
# Modules in my dev environment that are not required by the stack
MODULE_FILTER_REGEX_LIST = {
'beautifulsoup',
'black',
'bs4',
'dataone.*',
'ete3',
'Flask',
'logging-tree',
'PyQt.*',
'pyqt5',
'python-magic',
'redbaron',
}
def main():
repo_dir = d1_dev.util.find_repo_root()
req_path = os.path.join(repo_dir, REQUIREMENTS_FILENAME)
req_backup_path = req_path + ".bak"
try:
os.remove(req_backup_path)
except FileNotFoundError:
pass
shutil.move(req_path, req_backup_path)
req_list = sorted(get_reqs())
write_reqs(req_path, req_list)
def get_reqs():
req_list = []
# noinspection PyProtectedMember
for package_dist in pip._internal.utils.misc.get_installed_distributions(local_only=True):
if not is_filtered_package(package_dist.project_name):
req_str = str(package_dist.as_requirement())
req_list.append(req_str)
return req_list
def is_filtered_package(project_name):
for filter_rx in MODULE_FILTER_REGEX_LIST:
if re.match(filter_rx, project_name, re.IGNORECASE):
            print('Filtered: {} (matched {})'.format(project_name, filter_rx))
return True
print('Included: {}'.format(project_name))
return False
def write_reqs(req_path, req_list):
"""
Args:
req_path:
req_list:
"""
with open(req_path, 'w') as f:
f.write('\n'.join(req_list) + "\n")
if __name__ == '__main__':
main()
|
Add script that creates a filtered list of required packages
|
Add script that creates a filtered list of required packages
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
Add script that creates a filtered list of required packages
|
#!/usr/bin/env python
import shutil
import d1_dev.util
import os
import pip._internal.utils.misc
import re
REQUIREMENTS_FILENAME = 'requirements.txt'
# Modules in my dev environment that are not required by the stack
MODULE_FILTER_REGEX_LIST = {
'beautifulsoup',
'black',
'bs4',
'dataone.*',
'ete3',
'Flask',
'logging-tree',
'PyQt.*',
'pyqt5',
'python-magic',
'redbaron',
}
def main():
repo_dir = d1_dev.util.find_repo_root()
req_path = os.path.join(repo_dir, REQUIREMENTS_FILENAME)
req_backup_path = req_path + ".bak"
try:
os.remove(req_backup_path)
except FileNotFoundError:
pass
shutil.move(req_path, req_backup_path)
req_list = sorted(get_reqs())
write_reqs(req_path, req_list)
def get_reqs():
req_list = []
# noinspection PyProtectedMember
for package_dist in pip._internal.utils.misc.get_installed_distributions(local_only=True):
if not is_filtered_package(package_dist.project_name):
req_str = str(package_dist.as_requirement())
req_list.append(req_str)
return req_list
def is_filtered_package(project_name):
for filter_rx in MODULE_FILTER_REGEX_LIST:
if re.match(filter_rx, project_name, re.IGNORECASE):
            print('Filtered: {} (matched {})'.format(project_name, filter_rx))
return True
print('Included: {}'.format(project_name))
return False
def write_reqs(req_path, req_list):
"""
Args:
req_path:
req_list:
"""
with open(req_path, 'w') as f:
f.write('\n'.join(req_list) + "\n")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script that creates a filtered list of required packages<commit_after>
|
#!/usr/bin/env python
import shutil
import d1_dev.util
import os
import pip._internal.utils.misc
import re
REQUIREMENTS_FILENAME = 'requirements.txt'
# Modules in my dev environment that are not required by the stack
MODULE_FILTER_REGEX_LIST = {
'beautifulsoup',
'black',
'bs4',
'dataone.*',
'ete3',
'Flask',
'logging-tree',
'PyQt.*',
'pyqt5',
'python-magic',
'redbaron',
}
def main():
repo_dir = d1_dev.util.find_repo_root()
req_path = os.path.join(repo_dir, REQUIREMENTS_FILENAME)
req_backup_path = req_path + ".bak"
try:
os.remove(req_backup_path)
except FileNotFoundError:
pass
shutil.move(req_path, req_backup_path)
req_list = sorted(get_reqs())
write_reqs(req_path, req_list)
def get_reqs():
req_list = []
# noinspection PyProtectedMember
for package_dist in pip._internal.utils.misc.get_installed_distributions(local_only=True):
if not is_filtered_package(package_dist.project_name):
req_str = str(package_dist.as_requirement())
req_list.append(req_str)
return req_list
def is_filtered_package(project_name):
for filter_rx in MODULE_FILTER_REGEX_LIST:
if re.match(filter_rx, project_name, re.IGNORECASE):
            print('Filtered: {} (matched {})'.format(project_name, filter_rx))
return True
print('Included: {}'.format(project_name))
return False
def write_reqs(req_path, req_list):
"""
Args:
req_path:
req_list:
"""
with open(req_path, 'w') as f:
f.write('\n'.join(req_list) + "\n")
if __name__ == '__main__':
main()
|
Add script that creates a filtered list of required packages#!/usr/bin/env python
import shutil
import d1_dev.util
import os
import pip._internal.utils.misc
import re
REQUIREMENTS_FILENAME = 'requirements.txt'
# Modules in my dev environment that are not required by the stack
MODULE_FILTER_REGEX_LIST = {
'beautifulsoup',
'black',
'bs4',
'dataone.*',
'ete3',
'Flask',
'logging-tree',
'PyQt.*',
'pyqt5',
'python-magic',
'redbaron',
}
def main():
repo_dir = d1_dev.util.find_repo_root()
req_path = os.path.join(repo_dir, REQUIREMENTS_FILENAME)
req_backup_path = req_path + ".bak"
try:
os.remove(req_backup_path)
except FileNotFoundError:
pass
shutil.move(req_path, req_backup_path)
req_list = sorted(get_reqs())
write_reqs(req_path, req_list)
def get_reqs():
req_list = []
# noinspection PyProtectedMember
for package_dist in pip._internal.utils.misc.get_installed_distributions(local_only=True):
if not is_filtered_package(package_dist.project_name):
req_str = str(package_dist.as_requirement())
req_list.append(req_str)
return req_list
def is_filtered_package(project_name):
for filter_rx in MODULE_FILTER_REGEX_LIST:
if re.match(filter_rx, project_name, re.IGNORECASE):
            print('Filtered: {} (matched {})'.format(project_name, filter_rx))
return True
print('Included: {}'.format(project_name))
return False
def write_reqs(req_path, req_list):
"""
Args:
req_path:
req_list:
"""
with open(req_path, 'w') as f:
f.write('\n'.join(req_list) + "\n")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script that creates a filtered list of required packages<commit_after>#!/usr/bin/env python
import shutil
import d1_dev.util
import os
import pip._internal.utils.misc
import re
REQUIREMENTS_FILENAME = 'requirements.txt'
# Modules in my dev environment that are not required by the stack
MODULE_FILTER_REGEX_LIST = {
'beautifulsoup',
'black',
'bs4',
'dataone.*',
'ete3',
'Flask',
'logging-tree',
'PyQt.*',
'pyqt5',
'python-magic',
'redbaron',
}
def main():
repo_dir = d1_dev.util.find_repo_root()
req_path = os.path.join(repo_dir, REQUIREMENTS_FILENAME)
req_backup_path = req_path + ".bak"
try:
os.remove(req_backup_path)
except FileNotFoundError:
pass
shutil.move(req_path, req_backup_path)
req_list = sorted(get_reqs())
write_reqs(req_path, req_list)
def get_reqs():
req_list = []
# noinspection PyProtectedMember
for package_dist in pip._internal.utils.misc.get_installed_distributions(local_only=True):
if not is_filtered_package(package_dist.project_name):
req_str = str(package_dist.as_requirement())
req_list.append(req_str)
return req_list
def is_filtered_package(project_name):
for filter_rx in MODULE_FILTER_REGEX_LIST:
if re.match(filter_rx, project_name, re.IGNORECASE):
            print('Filtered: {} (matched {})'.format(project_name, filter_rx))
return True
print('Included: {}'.format(project_name))
return False
def write_reqs(req_path, req_list):
"""
Args:
req_path:
req_list:
"""
with open(req_path, 'w') as f:
f.write('\n'.join(req_list) + "\n")
if __name__ == '__main__':
main()
|
|
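Note on update-requirements-txt.py above: get_reqs() relies on pip._internal, a private API that pip may change between releases. A rough public-API alternative built on pkg_resources (shipped with setuptools) would look like the sketch below, with the caveat that working_set lists every distribution on sys.path rather than only locally installed ones:
import pkg_resources

def get_reqs_public():
    """Rough pkg_resources-based equivalent of get_reqs() above."""
    req_list = []
    for dist in pkg_resources.working_set:
        if not is_filtered_package(dist.project_name):
            req_list.append(str(dist.as_requirement()))
    return req_list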
936cdf5d4115026a357ae4f8cd93479e98a46414
|
catkin/src/appctl/scripts/evdev_teleport_switcher.py
|
catkin/src/appctl/scripts/evdev_teleport_switcher.py
|
#!/usr/bin/env python
"""
This node listens for appctl/Mode changes and toggles an evdev_teleport
receiver accordingly.
"""
import rospy
from appctl.msg import Mode
from std_msgs.msg import Bool
class ModeHandler:
def __init__(self, modes, activation_pub):
self.modes = modes
self.activation_pub = activation_pub
def handle_msg(self, msg):
if msg.mode in self.modes:
self.activation_pub.publish(data=True)
else:
self.activation_pub.publish(data=False)
def main():
rospy.init_node('evdev_teleport_switcher', anonymous=True)
modes = rospy.get_param('~modes').split(',')
activation_node = '/evdev_teleport/activation/{}'.format(
rospy.get_param('~activation_node')
)
activation_pub = rospy.Publisher(
activation_node,
Bool,
queue_size = 1,
latch = True
)
mode_handler = ModeHandler(modes, activation_pub)
mode_sub = rospy.Subscriber('/appctl/mode', Mode, mode_handler.handle_msg)
rospy.spin()
if __name__=='__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Add appctl/Mode to evdev_teleport receiver bridge
|
Add appctl/Mode to evdev_teleport receiver bridge
|
Python
|
apache-2.0
|
EndPointCorp/appctl,EndPointCorp/appctl
|
Add appctl/Mode to evdev_teleport receiver bridge
|
#!/usr/bin/env python
"""
This node listens for appctl/Mode changes and toggles an evdev_teleport
receiver accordingly.
"""
import rospy
from appctl.msg import Mode
from std_msgs.msg import Bool
class ModeHandler:
def __init__(self, modes, activation_pub):
self.modes = modes
self.activation_pub = activation_pub
def handle_msg(self, msg):
if msg.mode in self.modes:
self.activation_pub.publish(data=True)
else:
self.activation_pub.publish(data=False)
def main():
rospy.init_node('evdev_teleport_switcher', anonymous=True)
modes = rospy.get_param('~modes').split(',')
activation_node = '/evdev_teleport/activation/{}'.format(
rospy.get_param('~activation_node')
)
activation_pub = rospy.Publisher(
activation_node,
Bool,
queue_size = 1,
latch = True
)
mode_handler = ModeHandler(modes, activation_pub)
mode_sub = rospy.Subscriber('/appctl/mode', Mode, mode_handler.handle_msg)
rospy.spin()
if __name__=='__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
<commit_before><commit_msg>Add appctl/Mode to evdev_teleport receiver bridge<commit_after>
|
#!/usr/bin/env python
"""
This node listens for appctl/Mode changes and toggles an evdev_teleport
receiver accordingly.
"""
import rospy
from appctl.msg import Mode
from std_msgs.msg import Bool
class ModeHandler:
def __init__(self, modes, activation_pub):
self.modes = modes
self.activation_pub = activation_pub
def handle_msg(self, msg):
if msg.mode in self.modes:
self.activation_pub.publish(data=True)
else:
self.activation_pub.publish(data=False)
def main():
rospy.init_node('evdev_teleport_switcher', anonymous=True)
modes = rospy.get_param('~modes').split(',')
activation_node = '/evdev_teleport/activation/{}'.format(
rospy.get_param('~activation_node')
)
activation_pub = rospy.Publisher(
activation_node,
Bool,
queue_size = 1,
latch = True
)
mode_handler = ModeHandler(modes, activation_pub)
mode_sub = rospy.Subscriber('/appctl/mode', Mode, mode_handler.handle_msg)
rospy.spin()
if __name__=='__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Add appctl/Mode to evdev_teleport receiver bridge#!/usr/bin/env python
"""
This node listens for appctl/Mode changes and toggles an evdev_teleport
receiver accordingly.
"""
import rospy
from appctl.msg import Mode
from std_msgs.msg import Bool
class ModeHandler:
def __init__(self, modes, activation_pub):
self.modes = modes
self.activation_pub = activation_pub
def handle_msg(self, msg):
if msg.mode in self.modes:
self.activation_pub.publish(data=True)
else:
self.activation_pub.publish(data=False)
def main():
rospy.init_node('evdev_teleport_switcher', anonymous=True)
modes = rospy.get_param('~modes').split(',')
activation_node = '/evdev_teleport/activation/{}'.format(
rospy.get_param('~activation_node')
)
activation_pub = rospy.Publisher(
activation_node,
Bool,
queue_size = 1,
latch = True
)
mode_handler = ModeHandler(modes, activation_pub)
mode_sub = rospy.Subscriber('/appctl/mode', Mode, mode_handler.handle_msg)
rospy.spin()
if __name__=='__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
<commit_before><commit_msg>Add appctl/Mode to evdev_teleport receiver bridge<commit_after>#!/usr/bin/env python
"""
This node listens for appctl/Mode changes and toggles an evdev_teleport
receiver accordingly.
"""
import rospy
from appctl.msg import Mode
from std_msgs.msg import Bool
class ModeHandler:
def __init__(self, modes, activation_pub):
self.modes = modes
self.activation_pub = activation_pub
def handle_msg(self, msg):
if msg.mode in self.modes:
self.activation_pub.publish(data=True)
else:
self.activation_pub.publish(data=False)
def main():
rospy.init_node('evdev_teleport_switcher', anonymous=True)
modes = rospy.get_param('~modes').split(',')
activation_node = '/evdev_teleport/activation/{}'.format(
rospy.get_param('~activation_node')
)
activation_pub = rospy.Publisher(
activation_node,
Bool,
queue_size = 1,
latch = True
)
mode_handler = ModeHandler(modes, activation_pub)
mode_sub = rospy.Subscriber('/appctl/mode', Mode, mode_handler.handle_msg)
rospy.spin()
if __name__=='__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
|
b4df967c39558dfc86e01aae9b84e3df25464d98
|
locationhash/tests/test_mongodb.py
|
locationhash/tests/test_mongodb.py
|
# encoding: utf-8
"""
Copyright 2011 Elliot Murphy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest2 as unittest
from pymongo import Connection
LOCATIONS = [
(37.58, 23.43), # Athens, Greece
(39.55, 116.25), # Beijing, China
(-33.55, 18.22), # Cape Town, South Africa
(53.20, -6.15), # Dublin, Ireland
(24.33, 81.48), # Key West, Florida
]
class MongoDBTests(unittest.TestCase):
def setUp(self):
self.connection = Connection()
self.db = self.connection.test_database
def tearDown(self):
self.connection.drop_database(self.db)
def test_inserting_locations(self):
locations = self.db.locations
for l in LOCATIONS:
locations.insert({'latitude': l[0], 'longitude': l[1]})
self.assertEqual(len(LOCATIONS), locations.count())
|
Write some locations to MongoDb
|
Write some locations to MongoDb
|
Python
|
apache-2.0
|
statik/locationhash,statik/locationhash
|
Write some locations to MongoDb
|
# encoding: utf-8
"""
Copyright 2011 Elliot Murphy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest2 as unittest
from pymongo import Connection
LOCATIONS = [
(37.58, 23.43), # Athens, Greece
(39.55, 116.25), # Beijing, China
(-33.55, 18.22), # Cape Town, South Africa
(53.20, -6.15), # Dublin, Ireland
(24.33, 81.48), # Key West, Florida
]
class MongoDBTests(unittest.TestCase):
def setUp(self):
self.connection = Connection()
self.db = self.connection.test_database
def tearDown(self):
self.connection.drop_database(self.db)
def test_inserting_locations(self):
locations = self.db.locations
for l in LOCATIONS:
locations.insert({'latitude': l[0], 'longitude': l[1]})
self.assertEqual(len(LOCATIONS), locations.count())
|
<commit_before><commit_msg>Write some locations to MongoDb<commit_after>
|
# encoding: utf-8
"""
Copyright 2011 Elliot Murphy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest2 as unittest
from pymongo import Connection
LOCATIONS = [
(37.58, 23.43), # Athens, Greece
(39.55, 116.25), # Beijing, China
(-33.55, 18.22), # Cape Town, South Africa
(53.20, -6.15), # Dublin, Ireland
(24.33, 81.48), # Key West, Florida
]
class MongoDBTests(unittest.TestCase):
def setUp(self):
self.connection = Connection()
self.db = self.connection.test_database
def tearDown(self):
self.connection.drop_database(self.db)
def test_inserting_locations(self):
locations = self.db.locations
for l in LOCATIONS:
locations.insert({'latitude': l[0], 'longitude': l[1]})
self.assertEqual(len(LOCATIONS), locations.count())
|
Write some locations to MongoDb# encoding: utf-8
"""
Copyright 2011 Elliot Murphy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest2 as unittest
from pymongo import Connection
LOCATIONS = [
(37.58, 23.43), # Athens, Greece
(39.55, 116.25), # Beijing, China
(-33.55, 18.22), # Cape Town, South Africa
(53.20, -6.15), # Dublin, Ireland
(24.33, 81.48), # Key West, Florida
]
class MongoDBTests(unittest.TestCase):
def setUp(self):
self.connection = Connection()
self.db = self.connection.test_database
def tearDown(self):
self.connection.drop_database(self.db)
def test_inserting_locations(self):
locations = self.db.locations
for l in LOCATIONS:
locations.insert({'latitude': l[0], 'longitude': l[1]})
self.assertEqual(len(LOCATIONS), locations.count())
|
<commit_before><commit_msg>Write some locations to MongoDb<commit_after># encoding: utf-8
"""
Copyright 2011 Elliot Murphy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest2 as unittest
from pymongo import Connection
LOCATIONS = [
(37.58, 23.43), # Athens, Greece
(39.55, 116.25), # Beijing, China
(-33.55, 18.22), # Cape Town, South Africa
(53.20, -6.15), # Dublin, Ireland
(24.33, 81.48), # Key West, Florida
]
class MongoDBTests(unittest.TestCase):
def setUp(self):
self.connection = Connection()
self.db = self.connection.test_database
def tearDown(self):
self.connection.drop_database(self.db)
def test_inserting_locations(self):
locations = self.db.locations
for l in LOCATIONS:
locations.insert({'latitude': l[0], 'longitude': l[1]})
self.assertEqual(len(LOCATIONS), locations.count())
|
|
be4dcfab3c07e599539209ff2656c09d551dd0eb
|
CLI/str2hex.py
|
CLI/str2hex.py
|
#!/usr/bin/python
import sys
if len(sys.argv) < 2:
print('Incorrect usage')
exit(1)
for char in sys.argv[1]:
sys.stdout.write('\\x' + char.encode('hex'))
|
Add str to hex utility
|
Add str to hex utility
|
Python
|
mit
|
reykjalin/tools,reykjalin/tools
|
Add str to hex utility
|
#!/usr/bin/python
import sys
if len(sys.argv) < 2:
print('Incorrect usage')
exit(1)
for char in sys.argv[1]:
sys.stdout.write('\\x' + char.encode('hex'))
|
<commit_before><commit_msg>Add str to hex utility<commit_after>
|
#!/usr/bin/python
import sys
if len(sys.argv) < 2:
print('Incorrect usage')
exit(1)
for char in sys.argv[1]:
sys.stdout.write('\\x' + char.encode('hex'))
|
Add str to hex utility#!/usr/bin/python
import sys
if len(sys.argv) < 2:
print('Incorrect usage')
exit(1)
for char in sys.argv[1]:
sys.stdout.write('\\x' + char.encode('hex'))
|
<commit_before><commit_msg>Add str to hex utility<commit_after>#!/usr/bin/python
import sys
if len(sys.argv) < 2:
print('Incorrect usage')
exit(1)
for char in sys.argv[1]:
sys.stdout.write('\\x' + char.encode('hex'))
|
|
4d837b2e6e183ad34917248bbe36b0c39281be9c
|
tests/settler_unit_test.py
|
tests/settler_unit_test.py
|
__author__ = 'QSG'
import unittest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.common.by import By
class SettlerTutorial(unittest.TestCase):
def setUp(self):
self.driver=webdriver.Firefox()
def test_tutorial(self):
driver=self.driver
driver.get("https://capstone-settlers.herokuapp.com/")
self.assertIn('Settlers of Massey', driver.title)
eles=driver.find_elements_by_class_name('start_text')
ele_tutorial=eles[1]
ele_tutorial.click()
wait=WebDriverWait(driver,10)
title=wait.until(lambda driver: driver.find_element_by_tag_name('h1'))
self.assertEqual(title.text,'Tutorial!')
def tearDown(self):
self.driver.close()
if __name__=="__main__":
unittest.main()
|
Add a manul python test file.
|
Add a manul python test file.
|
Python
|
mpl-2.0
|
159356-1702-Extramural/capstone,159356-1702-Extramural/capstone,159356-1702-Extramural/capstone,159356-1702-Extramural/capstone
|
Add a manul python test file.
|
__author__ = 'QSG'
import unittest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.common.by import By
class SettlerTutorial(unittest.TestCase):
def setUp(self):
self.driver=webdriver.Firefox()
def test_tutorial(self):
driver=self.driver
driver.get("https://capstone-settlers.herokuapp.com/")
self.assertIn('Settlers of Massey', driver.title)
eles=driver.find_elements_by_class_name('start_text')
ele_tutorial=eles[1]
ele_tutorial.click()
wait=WebDriverWait(driver,10)
title=wait.until(lambda driver: driver.find_element_by_tag_name('h1'))
self.assertEqual(title.text,'Tutorial!')
def tearDown(self):
self.driver.close()
if __name__=="__main__":
unittest.main()
|
<commit_before><commit_msg>Add a manul python test file.<commit_after>
|
__author__ = 'QSG'
import unittest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.common.by import By
class SettlerTutorial(unittest.TestCase):
def setUp(self):
self.driver=webdriver.Firefox()
def test_tutorial(self):
driver=self.driver
driver.get("https://capstone-settlers.herokuapp.com/")
self.assertIn('Settlers of Massey', driver.title)
eles=driver.find_elements_by_class_name('start_text')
ele_tutorial=eles[1]
ele_tutorial.click()
wait=WebDriverWait(driver,10)
title=wait.until(lambda driver: driver.find_element_by_tag_name('h1'))
self.assertEqual(title.text,'Tutorial!')
def tearDown(self):
self.driver.close()
if __name__=="__main__":
unittest.main()
|
Add a manul python test file.__author__ = 'QSG'
import unittest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.common.by import By
class SettlerTutorial(unittest.TestCase):
def setUp(self):
self.driver=webdriver.Firefox()
def test_tutorial(self):
driver=self.driver
driver.get("https://capstone-settlers.herokuapp.com/")
self.assertIn('Settlers of Massey', driver.title)
eles=driver.find_elements_by_class_name('start_text')
ele_tutorial=eles[1]
ele_tutorial.click()
wait=WebDriverWait(driver,10)
title=wait.until(lambda driver: driver.find_element_by_tag_name('h1'))
self.assertEqual(title.text,'Tutorial!')
def tearDown(self):
self.driver.close()
if __name__=="__main__":
unittest.main()
|
<commit_before><commit_msg>Add a manul python test file.<commit_after>__author__ = 'QSG'
import unittest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
# from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.common.by import By
class SettlerTutorial(unittest.TestCase):
def setUp(self):
self.driver=webdriver.Firefox()
def test_tutorial(self):
driver=self.driver
driver.get("https://capstone-settlers.herokuapp.com/")
self.assertIn('Settlers of Massey', driver.title)
eles=driver.find_elements_by_class_name('start_text')
ele_tutorial=eles[1]
ele_tutorial.click()
wait=WebDriverWait(driver,10)
title=wait.until(lambda driver: driver.find_element_by_tag_name('h1'))
self.assertEqual(title.text,'Tutorial!')
def tearDown(self):
self.driver.close()
if __name__=="__main__":
unittest.main()
|