Dataset schema, as reported by the dataset viewer (string lengths are min–max; `lang` and `license` are class-valued):

| column | dtype | lengths / classes |
|---|---|---|
| commit | string | 40–40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | string (classes) | 1 value |
| license | string (classes) | 13 values |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |

The records below are shown flattened, one field per block, with lone `|` lines separating fields.
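The tagged and untagged training columns are concatenations of the commit fields, and the rows below make the template visible. A minimal sketch of that construction, assuming plain concatenation with no extra separators (the exact whitespace handling of the real pipeline is not recoverable from these samples):

```python
def build_derived(row):
    """Derive the prompt/response/text columns from one commit record."""
    # <commit_before> wraps the pre-change file contents, <commit_msg> the
    # commit message, and <commit_after> marks where the post-change file begins.
    prompt_tagged = ("<commit_before>" + row["old_contents"] +
                     "<commit_msg>" + row["message"] + "<commit_after>")
    return {
        "prompt": row["old_contents"] + row["message"],
        "response": row["new_contents"],
        "prompt_tagged": prompt_tagged,
        "response_tagged": row["new_contents"],
        "text": row["old_contents"] + row["message"] + row["new_contents"],
        "text_tagged": prompt_tagged + row["new_contents"],
    }
```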
29b3402ef9339971005c36057512b2d9916947cb
|
challenges/01-Intro/test_data_and_variables.py
|
challenges/01-Intro/test_data_and_variables.py
|
# This first example lifted from http://pytest.org/latest/getting-started.html
def func(x):
    return x + 1
def test_answer():
    assert func(3) == 5  # fails as written (func(3) == 4), showing the runner's failure report
|
Set up our first example challenge
|
Set up our first example challenge
Works with py.test or nose, py.test is prettier
|
Python
|
apache-2.0
|
marwahaha/python-fundamentals,marwahaha/python-fundamentals,Destaneon/python-fundamentals,RaoUmer/python-fundamentals,RaoUmer/python-fundamentals,RaoUmer/python-fundamentals,Destaneon/python-fundamentals,marwahaha/python-fundamentals,Destaneon/python-fundamentals
|
Set up our first example challenge
Works with py.test or nose, py.test is prettier
|
# This first example lifted from http://pytest.org/latest/getting-started.html
def func(x):
    return x + 1
def test_answer():
    assert func(3) == 5  # fails as written (func(3) == 4), showing the runner's failure report
|
<commit_before><commit_msg>Set up our first example challenge
Works with py.test or nose, py.test is prettier<commit_after>
|
# This first example lifted from http://pytest.org/latest/getting-started.html
def func(x):
    return x + 1
def test_answer():
    assert func(3) == 5  # fails as written (func(3) == 4), showing the runner's failure report
|
Set up our first example challenge
Works with py.test or nose, py.test is prettier# This first example lifted from http://pytest.org/latest/getting-started.html
def func(x):
    return x + 1
def test_answer():
    assert func(3) == 5  # fails as written (func(3) == 4), showing the runner's failure report
|
<commit_before><commit_msg>Set up our first example challenge
Works with py.test or nose, py.test is prettier<commit_after># This first example lifted from http://pytest.org/latest/getting-started.html
def func(x):
    return x + 1
def test_answer():
    assert func(3) == 5  # fails as written (func(3) == 4), showing the runner's failure report
|
|
53288a2e49e468ced457b821691207565e739118
|
CodeFights/memoryPills.py
|
CodeFights/memoryPills.py
|
#!/usr/local/bin/python
# Code Fights Memory Pills (Core) Problem
from itertools import dropwhile
def memoryPills(pills):
    # Skip odd-length names; pad with four empty names so that three items
    # always remain after the first even-length one (with only three pads,
    # an all-odd input would raise StopIteration below).
    gen = dropwhile(lambda p: len(p) % 2 == 1, pills + [""] * 4)
    next(gen)  # consume the first even-length name itself
    return [next(gen) for _ in range(3)]  # the three names that follow it
def main():
tests = [
[["Notforgetan", "Antimoron", "Rememberin", "Bestmedicen",
"Superpillsus"], ["Bestmedicen", "Superpillsus", ""]],
[["Pillin"], ["", "", ""]],
[["Med 1", "Med 2", "Med 3", "Med 10", "Med 11", "Med 12", "Med 14",
"Med 42", "Med 239"], ["Med 11", "Med 12", "Med 14"]],
[["Pills", "Shmills", "Medicine", "Phedicine", "Hey", "Hoy"],
["Phedicine", "Hey", "Hoy"]],
[["Test", "Where", "The", "First", "Element", "Is", "Even"],
["Where", "The", "First"]]
]
for t in tests:
res = memoryPills(t[0])
if t[1] == res:
print("PASSED: memoryPills({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: memoryPills({}) returned {},"
"answer: {}").format(t[0], res, t[1]))
if __name__ == '__main__':
main()
|
Solve Code Fights memory pills problem
|
Solve Code Fights memory pills problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights memory pills problem
|
#!/usr/local/bin/python
# Code Fights Memory Pills (Core) Problem
from itertools import dropwhile
def memoryPills(pills):
    # Skip odd-length names; pad with four empty names so that three items
    # always remain after the first even-length one (with only three pads,
    # an all-odd input would raise StopIteration below).
    gen = dropwhile(lambda p: len(p) % 2 == 1, pills + [""] * 4)
    next(gen)  # consume the first even-length name itself
    return [next(gen) for _ in range(3)]  # the three names that follow it
def main():
tests = [
[["Notforgetan", "Antimoron", "Rememberin", "Bestmedicen",
"Superpillsus"], ["Bestmedicen", "Superpillsus", ""]],
[["Pillin"], ["", "", ""]],
[["Med 1", "Med 2", "Med 3", "Med 10", "Med 11", "Med 12", "Med 14",
"Med 42", "Med 239"], ["Med 11", "Med 12", "Med 14"]],
[["Pills", "Shmills", "Medicine", "Phedicine", "Hey", "Hoy"],
["Phedicine", "Hey", "Hoy"]],
[["Test", "Where", "The", "First", "Element", "Is", "Even"],
["Where", "The", "First"]]
]
for t in tests:
res = memoryPills(t[0])
if t[1] == res:
print("PASSED: memoryPills({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: memoryPills({}) returned {},"
"answer: {}").format(t[0], res, t[1]))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights memory pills problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Memory Pills (Core) Problem
from itertools import dropwhile
def memoryPills(pills):
    # Skip odd-length names; pad with four empty names so that three items
    # always remain after the first even-length one (with only three pads,
    # an all-odd input would raise StopIteration below).
    gen = dropwhile(lambda p: len(p) % 2 == 1, pills + [""] * 4)
    next(gen)  # consume the first even-length name itself
    return [next(gen) for _ in range(3)]  # the three names that follow it
def main():
tests = [
[["Notforgetan", "Antimoron", "Rememberin", "Bestmedicen",
"Superpillsus"], ["Bestmedicen", "Superpillsus", ""]],
[["Pillin"], ["", "", ""]],
[["Med 1", "Med 2", "Med 3", "Med 10", "Med 11", "Med 12", "Med 14",
"Med 42", "Med 239"], ["Med 11", "Med 12", "Med 14"]],
[["Pills", "Shmills", "Medicine", "Phedicine", "Hey", "Hoy"],
["Phedicine", "Hey", "Hoy"]],
[["Test", "Where", "The", "First", "Element", "Is", "Even"],
["Where", "The", "First"]]
]
for t in tests:
res = memoryPills(t[0])
if t[1] == res:
print("PASSED: memoryPills({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: memoryPills({}) returned {},"
"answer: {}").format(t[0], res, t[1]))
if __name__ == '__main__':
main()
|
Solve Code Fights memory pills problem#!/usr/local/bin/python
# Code Fights Memory Pills (Core) Problem
from itertools import dropwhile
def memoryPills(pills):
    # Skip odd-length names; pad with four empty names so that three items
    # always remain after the first even-length one (with only three pads,
    # an all-odd input would raise StopIteration below).
    gen = dropwhile(lambda p: len(p) % 2 == 1, pills + [""] * 4)
    next(gen)  # consume the first even-length name itself
    return [next(gen) for _ in range(3)]  # the three names that follow it
def main():
tests = [
[["Notforgetan", "Antimoron", "Rememberin", "Bestmedicen",
"Superpillsus"], ["Bestmedicen", "Superpillsus", ""]],
[["Pillin"], ["", "", ""]],
[["Med 1", "Med 2", "Med 3", "Med 10", "Med 11", "Med 12", "Med 14",
"Med 42", "Med 239"], ["Med 11", "Med 12", "Med 14"]],
[["Pills", "Shmills", "Medicine", "Phedicine", "Hey", "Hoy"],
["Phedicine", "Hey", "Hoy"]],
[["Test", "Where", "The", "First", "Element", "Is", "Even"],
["Where", "The", "First"]]
]
for t in tests:
res = memoryPills(t[0])
if t[1] == res:
print("PASSED: memoryPills({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: memoryPills({}) returned {},"
"answer: {}").format(t[0], res, t[1]))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights memory pills problem<commit_after>#!/usr/local/bin/python
# Code Fights Memory Pills (Core) Problem
from itertools import dropwhile
def memoryPills(pills):
    # Skip odd-length names; pad with four empty names so that three items
    # always remain after the first even-length one (with only three pads,
    # an all-odd input would raise StopIteration below).
    gen = dropwhile(lambda p: len(p) % 2 == 1, pills + [""] * 4)
    next(gen)  # consume the first even-length name itself
    return [next(gen) for _ in range(3)]  # the three names that follow it
def main():
tests = [
[["Notforgetan", "Antimoron", "Rememberin", "Bestmedicen",
"Superpillsus"], ["Bestmedicen", "Superpillsus", ""]],
[["Pillin"], ["", "", ""]],
[["Med 1", "Med 2", "Med 3", "Med 10", "Med 11", "Med 12", "Med 14",
"Med 42", "Med 239"], ["Med 11", "Med 12", "Med 14"]],
[["Pills", "Shmills", "Medicine", "Phedicine", "Hey", "Hoy"],
["Phedicine", "Hey", "Hoy"]],
[["Test", "Where", "The", "First", "Element", "Is", "Even"],
["Where", "The", "First"]]
]
for t in tests:
res = memoryPills(t[0])
if t[1] == res:
print("PASSED: memoryPills({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: memoryPills({}) returned {},"
"answer: {}").format(t[0], res, t[1]))
if __name__ == '__main__':
main()
|
|
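The memoryPills solution leans on `itertools.dropwhile`, which discards items only from the front of the iterable, up to the first item that fails the predicate, and then yields everything that remains unconditionally. A standalone demo of that behavior (example names invented for illustration):

```python
from itertools import dropwhile

names = ["odd", "trio", "x", "quad"]  # name lengths: 3, 4, 1, 4
survivors = list(dropwhile(lambda s: len(s) % 2 == 1, names))
# 'odd' is dropped; dropping stops for good at 'trio', so the odd-length
# 'x' later in the list still comes through.
print(survivors)  # ['trio', 'x', 'quad']
```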
7dcba6a8be7bd87dd377bd3f99e8e2293bb6632d
|
django/santropolFeast/member/migrations/0008_auto_20160727_2251.py
|
django/santropolFeast/member/migrations/0008_auto_20160727_2251.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-27 22:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('member', '0007_auto_20160726_1703'),
]
operations = [
migrations.RenameField(
model_name='address',
old_name='lat',
new_name='latitude',
),
migrations.RenameField(
model_name='address',
old_name='lon',
new_name='longitude',
),
]
|
Add missing migration file for latitude and longitude
|
Add missing migration file for latitude and longitude
|
Python
|
agpl-3.0
|
madmath/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef
|
Add missing migration file for latitude and longitude
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-27 22:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('member', '0007_auto_20160726_1703'),
]
operations = [
migrations.RenameField(
model_name='address',
old_name='lat',
new_name='latitude',
),
migrations.RenameField(
model_name='address',
old_name='lon',
new_name='longitude',
),
]
|
<commit_before><commit_msg>Add missing migration file for latitude and longitude<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-27 22:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('member', '0007_auto_20160726_1703'),
]
operations = [
migrations.RenameField(
model_name='address',
old_name='lat',
new_name='latitude',
),
migrations.RenameField(
model_name='address',
old_name='lon',
new_name='longitude',
),
]
|
Add missing migration file for latitude and longitude# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-27 22:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('member', '0007_auto_20160726_1703'),
]
operations = [
migrations.RenameField(
model_name='address',
old_name='lat',
new_name='latitude',
),
migrations.RenameField(
model_name='address',
old_name='lon',
new_name='longitude',
),
]
|
<commit_before><commit_msg>Add missing migration file for latitude and longitude<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-27 22:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('member', '0007_auto_20160726_1703'),
]
operations = [
migrations.RenameField(
model_name='address',
old_name='lat',
new_name='latitude',
),
migrations.RenameField(
model_name='address',
old_name='lon',
new_name='longitude',
),
]
|
|
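The migration above is auto-generated; a `RenameField` pair like this is what `makemigrations` emits after fields are renamed in place on the model. A hypothetical sketch of the corresponding model edit (the real Address model's field types and remaining fields are not part of this record, so `FloatField` here is an assumption):

```python
from django.db import models

class Address(models.Model):
    latitude = models.FloatField(null=True)   # renamed from "lat"
    longitude = models.FloatField(null=True)  # renamed from "lon"
```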
cec23490209e952d1b9b764714cf950108443d2c
|
ovp_users/migrations/0013_auto_20170208_2118.py
|
ovp_users/migrations/0013_auto_20170208_2118.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-08 21:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ovp_users', '0012_merge_20170112_2144'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=190, unique=True, verbose_name='Email'),
),
]
|
Add missing migration for last commit
|
Add missing migration for last commit
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users
|
Add missing migration for last commit
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-08 21:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ovp_users', '0012_merge_20170112_2144'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=190, unique=True, verbose_name='Email'),
),
]
|
<commit_before><commit_msg>Add missing migration for last commit<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-08 21:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ovp_users', '0012_merge_20170112_2144'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=190, unique=True, verbose_name='Email'),
),
]
|
Add missing migration for last commit# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-08 21:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ovp_users', '0012_merge_20170112_2144'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=190, unique=True, verbose_name='Email'),
),
]
|
<commit_before><commit_msg>Add missing migration for last commit<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-08 21:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ovp_users', '0012_merge_20170112_2144'),
]
operations = [
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=190, unique=True, verbose_name='Email'),
),
]
|
|
3517787a7e9c18ec2088bf0a58913ea99ce94fb1
|
tests/test_template_newrequest.py
|
tests/test_template_newrequest.py
|
import testing as T
class NewRequestTemplateTest(T.TemplateTestCase):
authenticated = True
newrequest_page = 'modules/newrequest.html'
form_elements = ['title', 'tags', 'review', 'repo', 'branch', 'description', 'comments', 'watchers']
def test_request_form_labels(self):
tree = self.render_etree(self.newrequest_page)
for_attr = ['request-form-%s' % elem for elem in self.form_elements]
found_labels = []
for label in tree.iter('label'):
found_labels.append(label.attrib['for'])
T.assert_sorted_equal(for_attr, found_labels)
def test_request_form_input(self):
tree = self.render_etree(self.newrequest_page)
id_attr = ['request-form-%s' % elem for elem in self.form_elements]
name_attr = ['request-%s' % elem for elem in self.form_elements]
found_id = []
found_name = []
for field in tree.iter('input'):
if 'type' not in field.attrib: # ignore hidden/submit
found_id.append(field.attrib['id'])
found_name.append(field.attrib['name'])
for textarea in tree.iter('textarea'):
found_id.append(textarea.attrib['id'])
found_name.append(textarea.attrib['name'])
T.assert_sorted_equal(id_attr, found_id)
T.assert_sorted_equal(name_attr, found_name)
if __name__ == '__main__':
T.run()
|
Add test for labels/inputs in newrequest form
|
Add test for labels/inputs in newrequest form
|
Python
|
apache-2.0
|
bis12/pushmanager,asottile/pushmanager,imbstack/pushmanager,asottile/pushmanager,bchess/pushmanager,bis12/pushmanager,imbstack/pushmanager,Yelp/pushmanager,bis12/pushmanager,bchess/pushmanager,imbstack/pushmanager,YelpArchive/pushmanager,Yelp/pushmanager,YelpArchive/pushmanager,Yelp/pushmanager,asottile/pushmanager,Yelp/pushmanager,bchess/pushmanager,YelpArchive/pushmanager,YelpArchive/pushmanager
|
Add test for labels/inputs in newrequest form
|
import testing as T
class NewRequestTemplateTest(T.TemplateTestCase):
authenticated = True
newrequest_page = 'modules/newrequest.html'
form_elements = ['title', 'tags', 'review', 'repo', 'branch', 'description', 'comments', 'watchers']
def test_request_form_labels(self):
tree = self.render_etree(self.newrequest_page)
for_attr = ['request-form-%s' % elem for elem in self.form_elements]
found_labels = []
for label in tree.iter('label'):
found_labels.append(label.attrib['for'])
T.assert_sorted_equal(for_attr, found_labels)
def test_request_form_input(self):
tree = self.render_etree(self.newrequest_page)
id_attr = ['request-form-%s' % elem for elem in self.form_elements]
name_attr = ['request-%s' % elem for elem in self.form_elements]
found_id = []
found_name = []
for field in tree.iter('input'):
if 'type' not in field.attrib: # ignore hidden/submit
found_id.append(field.attrib['id'])
found_name.append(field.attrib['name'])
for textarea in tree.iter('textarea'):
found_id.append(textarea.attrib['id'])
found_name.append(textarea.attrib['name'])
T.assert_sorted_equal(id_attr, found_id)
T.assert_sorted_equal(name_attr, found_name)
if __name__ == '__main__':
T.run()
|
<commit_before><commit_msg>Add test for labels/inputs in newrequest form<commit_after>
|
import testing as T
class NewRequestTemplateTest(T.TemplateTestCase):
authenticated = True
newrequest_page = 'modules/newrequest.html'
form_elements = ['title', 'tags', 'review', 'repo', 'branch', 'description', 'comments', 'watchers']
def test_request_form_labels(self):
tree = self.render_etree(self.newrequest_page)
for_attr = ['request-form-%s' % elem for elem in self.form_elements]
found_labels = []
for label in tree.iter('label'):
found_labels.append(label.attrib['for'])
T.assert_sorted_equal(for_attr, found_labels)
def test_request_form_input(self):
tree = self.render_etree(self.newrequest_page)
id_attr = ['request-form-%s' % elem for elem in self.form_elements]
name_attr = ['request-%s' % elem for elem in self.form_elements]
found_id = []
found_name = []
for field in tree.iter('input'):
if 'type' not in field.attrib: # ignore hidden/submit
found_id.append(field.attrib['id'])
found_name.append(field.attrib['name'])
for textarea in tree.iter('textarea'):
found_id.append(textarea.attrib['id'])
found_name.append(textarea.attrib['name'])
T.assert_sorted_equal(id_attr, found_id)
T.assert_sorted_equal(name_attr, found_name)
if __name__ == '__main__':
T.run()
|
Add test for labels/inputs in newrequest formimport testing as T
class NewRequestTemplateTest(T.TemplateTestCase):
authenticated = True
newrequest_page = 'modules/newrequest.html'
form_elements = ['title', 'tags', 'review', 'repo', 'branch', 'description', 'comments', 'watchers']
def test_request_form_labels(self):
tree = self.render_etree(self.newrequest_page)
for_attr = ['request-form-%s' % elem for elem in self.form_elements]
found_labels = []
for label in tree.iter('label'):
found_labels.append(label.attrib['for'])
T.assert_sorted_equal(for_attr, found_labels)
def test_request_form_input(self):
tree = self.render_etree(self.newrequest_page)
id_attr = ['request-form-%s' % elem for elem in self.form_elements]
name_attr = ['request-%s' % elem for elem in self.form_elements]
found_id = []
found_name = []
for field in tree.iter('input'):
if 'type' not in field.attrib: # ignore hidden/submit
found_id.append(field.attrib['id'])
found_name.append(field.attrib['name'])
for textarea in tree.iter('textarea'):
found_id.append(textarea.attrib['id'])
found_name.append(textarea.attrib['name'])
T.assert_sorted_equal(id_attr, found_id)
T.assert_sorted_equal(name_attr, found_name)
if __name__ == '__main__':
T.run()
|
<commit_before><commit_msg>Add test for labels/inputs in newrequest form<commit_after>import testing as T
class NewRequestTemplateTest(T.TemplateTestCase):
authenticated = True
newrequest_page = 'modules/newrequest.html'
form_elements = ['title', 'tags', 'review', 'repo', 'branch', 'description', 'comments', 'watchers']
def test_request_form_labels(self):
tree = self.render_etree(self.newrequest_page)
for_attr = ['request-form-%s' % elem for elem in self.form_elements]
found_labels = []
for label in tree.iter('label'):
found_labels.append(label.attrib['for'])
T.assert_sorted_equal(for_attr, found_labels)
def test_request_form_input(self):
tree = self.render_etree(self.newrequest_page)
id_attr = ['request-form-%s' % elem for elem in self.form_elements]
name_attr = ['request-%s' % elem for elem in self.form_elements]
found_id = []
found_name = []
for field in tree.iter('input'):
if 'type' not in field.attrib: # ignore hidden/submit
found_id.append(field.attrib['id'])
found_name.append(field.attrib['name'])
for textarea in tree.iter('textarea'):
found_id.append(textarea.attrib['id'])
found_name.append(textarea.attrib['name'])
T.assert_sorted_equal(id_attr, found_id)
T.assert_sorted_equal(name_attr, found_name)
if __name__ == '__main__':
T.run()
|
|
3133469874da37afcbf6e75d9afec4daae7a4c9e
|
scikits/learn/benchmarks/bench_plot_balltree.py
|
scikits/learn/benchmarks/bench_plot_balltree.py
|
from scikits.learn.BallTree import BallTree, knn_brute
import numpy as np
from time import time
from scipy.spatial import cKDTree
import sys
import pylab as pl
def compare_nbrs(nbrs1,nbrs2):
assert nbrs1.shape == nbrs2.shape
if(nbrs1.ndim == 2):
N,k = nbrs1.shape
for i in range(N):
for j in range(k):
if nbrs1[i,j]==i:
continue
elif nbrs1[i,j] not in nbrs2[i]:
return False
return True
elif(nbrs1.ndim == 1):
N = len(nbrs1)
        return np.all(nbrs1 == nbrs2)
N = 1000
ls = 1 # leaf size
k = 20
BT_results = []
KDT_results = []
for i in range(1, 10):
print 'Iteration %s' %i
D = i*100
M = np.random.random([N, D])
t0 = time()
BT = BallTree(M, ls)
d, nbrs1 = BT.query(M, k)
delta = time() - t0
BT_results.append(delta)
t0 = time()
KDT = cKDTree(M, ls)
d, nbrs2 = KDT.query(M, k)
delta = time() - t0
KDT_results.append(delta)
# this checks we get the correct result
assert compare_nbrs(nbrs1,nbrs2)
xx = 100*np.arange(1, 10)
pl.plot(xx, BT_results, label='scikits.learn (BallTree)')
pl.plot(xx, KDT_results, label='scipy (cKDTree)')
pl.xlabel('number of dimensions')
pl.ylabel('time (seconds)')
pl.legend()
pl.show()
|
Add another balltree benchmark, this time with plot interface.
|
Add another balltree benchmark, this time with plot interface.
git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@598 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
|
Python
|
bsd-3-clause
|
YinongLong/scikit-learn,Barmaley-exe/scikit-learn,larsmans/scikit-learn,maheshakya/scikit-learn,marcocaccin/scikit-learn,jblackburne/scikit-learn,xwolf12/scikit-learn,alvarofierroclavero/scikit-learn,mikebenfield/scikit-learn,jakobworldpeace/scikit-learn,yyjiang/scikit-learn,jorge2703/scikit-learn,trungnt13/scikit-learn,waterponey/scikit-learn,qifeigit/scikit-learn,3manuek/scikit-learn,anirudhjayaraman/scikit-learn,aflaxman/scikit-learn,zuku1985/scikit-learn,cauchycui/scikit-learn,tomlof/scikit-learn,mwv/scikit-learn,adamgreenhall/scikit-learn,yask123/scikit-learn,shusenl/scikit-learn,saiwing-yeung/scikit-learn,zaxtax/scikit-learn,quheng/scikit-learn,dingocuster/scikit-learn,kagayakidan/scikit-learn,Aasmi/scikit-learn,ilyes14/scikit-learn,walterreade/scikit-learn,shusenl/scikit-learn,smartscheduling/scikit-learn-categorical-tree,smartscheduling/scikit-learn-categorical-tree,vortex-ape/scikit-learn,ngoix/OCRF,pianomania/scikit-learn,fbagirov/scikit-learn,rajat1994/scikit-learn,tawsifkhan/scikit-learn,mjudsp/Tsallis,iismd17/scikit-learn,pianomania/scikit-learn,RachitKansal/scikit-learn,shenzebang/scikit-learn,robbymeals/scikit-learn,xwolf12/scikit-learn,PatrickChrist/scikit-learn,henridwyer/scikit-learn,elkingtonmcb/scikit-learn,YinongLong/scikit-learn,trankmichael/scikit-learn,lenovor/scikit-learn,hsuantien/scikit-learn,joernhees/scikit-learn,ephes/scikit-learn,ChanderG/scikit-learn,nhejazi/scikit-learn,cauchycui/scikit-learn,cwu2011/scikit-learn,tmhm/scikit-learn,andaag/scikit-learn,walterreade/scikit-learn,TomDLT/scikit-learn,kashif/scikit-learn,mjudsp/Tsallis,Srisai85/scikit-learn,yask123/scikit-learn,lbishal/scikit-learn,lucidfrontier45/scikit-learn,raghavrv/scikit-learn,CVML/scikit-learn,shangwuhencc/scikit-learn,Akshay0724/scikit-learn,f3r/scikit-learn,yyjiang/scikit-learn,Vimos/scikit-learn,dsquareindia/scikit-learn,mhdella/scikit-learn,wzbozon/scikit-learn,aflaxman/scikit-learn,rahuldhote/scikit-learn,clemkoa/scikit-learn,spallavolu/scikit-learn,JosmanPS/scikit-learn,Jimmy-Morzaria/scikit-learn,altairpearl/scikit-learn,ZenDevelopmentSystems/scikit-learn,siutanwong/scikit-learn,Barmaley-exe/scikit-learn,manhhomienbienthuy/scikit-learn,loli/semisupervisedforests,zuku1985/scikit-learn,ephes/scikit-learn,AlexanderFabisch/scikit-learn,scikit-learn/scikit-learn,q1ang/scikit-learn,jayflo/scikit-learn,pkruskal/scikit-learn,xuewei4d/scikit-learn,aewhatley/scikit-learn,jmschrei/scikit-learn,frank-tancf/scikit-learn,phdowling/scikit-learn,mblondel/scikit-learn,manashmndl/scikit-learn,clemkoa/scikit-learn,sgenoud/scikit-learn,bikong2/scikit-learn,eg-zhang/scikit-learn,hitszxp/scikit-learn,billy-inn/scikit-learn,zorojean/scikit-learn,simon-pepin/scikit-learn,gclenaghan/scikit-learn,tawsifkhan/scikit-learn,justincassidy/scikit-learn,rsivapr/scikit-learn,roxyboy/scikit-learn,wlamond/scikit-learn,anirudhjayaraman/scikit-learn,iismd17/scikit-learn,alexsavio/scikit-learn,michigraber/scikit-learn,MatthieuBizien/scikit-learn,jakirkham/scikit-learn,JeanKossaifi/scikit-learn,jereze/scikit-learn,NunoEdgarGub1/scikit-learn,jayflo/scikit-learn,fredhusser/scikit-learn,krez13/scikit-learn,AIML/scikit-learn,Sentient07/scikit-learn,bnaul/scikit-learn,mayblue9/scikit-learn,ssaeger/scikit-learn,Aasmi/scikit-learn,aewhatley/scikit-learn,fredhusser/scikit-learn,trankmichael/scikit-learn,pnedunuri/scikit-learn,victorbergelin/scikit-learn,nomadcube/scikit-learn,saiwing-yeung/scikit-learn,RomainBrault/scikit-learn,jblackburne/scikit-learn,frank-tancf/scikit-learn,DonBeo/scikit-learn,jmetzen/scikit-learn,Lawrence-Liu/sc
ikit-learn,sanketloke/scikit-learn,evgchz/scikit-learn,jorik041/scikit-learn,rahul-c1/scikit-learn,sumspr/scikit-learn,mfjb/scikit-learn,ilyes14/scikit-learn,maheshakya/scikit-learn,shyamalschandra/scikit-learn,IssamLaradji/scikit-learn,huobaowangxi/scikit-learn,MohammedWasim/scikit-learn,theoryno3/scikit-learn,hitszxp/scikit-learn,ZenDevelopmentSystems/scikit-learn,samuel1208/scikit-learn,sonnyhu/scikit-learn,themrmax/scikit-learn,mfjb/scikit-learn,Sentient07/scikit-learn,f3r/scikit-learn,fredhusser/scikit-learn,thilbern/scikit-learn,siutanwong/scikit-learn,tomlof/scikit-learn,jkarnows/scikit-learn,xwolf12/scikit-learn,ishanic/scikit-learn,idlead/scikit-learn,huobaowangxi/scikit-learn,jmschrei/scikit-learn,mhue/scikit-learn,kmike/scikit-learn,hlin117/scikit-learn,pratapvardhan/scikit-learn,djgagne/scikit-learn,anurag313/scikit-learn,amueller/scikit-learn,arahuja/scikit-learn,mrshu/scikit-learn,kmike/scikit-learn,Aasmi/scikit-learn,AlexanderFabisch/scikit-learn,ycaihua/scikit-learn,IssamLaradji/scikit-learn,mhdella/scikit-learn,liangz0707/scikit-learn,manhhomienbienthuy/scikit-learn,thilbern/scikit-learn,shusenl/scikit-learn,LohithBlaze/scikit-learn,CforED/Machine-Learning,nrhine1/scikit-learn,MechCoder/scikit-learn,MechCoder/scikit-learn,aminert/scikit-learn,michigraber/scikit-learn,equialgo/scikit-learn,moutai/scikit-learn,chrisburr/scikit-learn,lazywei/scikit-learn,khkaminska/scikit-learn,thientu/scikit-learn,rahul-c1/scikit-learn,aetilley/scikit-learn,davidgbe/scikit-learn,cdegroc/scikit-learn,kaichogami/scikit-learn,liberatorqjw/scikit-learn,fzalkow/scikit-learn,MartinDelzant/scikit-learn,larsmans/scikit-learn,manhhomienbienthuy/scikit-learn,tosolveit/scikit-learn,equialgo/scikit-learn,chrsrds/scikit-learn,hlin117/scikit-learn,CVML/scikit-learn,AnasGhrab/scikit-learn,pratapvardhan/scikit-learn,phdowling/scikit-learn,tmhm/scikit-learn,manashmndl/scikit-learn,ElDeveloper/scikit-learn,djgagne/scikit-learn,mrshu/scikit-learn,RayMick/scikit-learn,cwu2011/scikit-learn,stylianos-kampakis/scikit-learn,JosmanPS/scikit-learn,eickenberg/scikit-learn,btabibian/scikit-learn,LohithBlaze/scikit-learn,Garrett-R/scikit-learn,JsNoNo/scikit-learn,Windy-Ground/scikit-learn,vermouthmjl/scikit-learn,jm-begon/scikit-learn,0x0all/scikit-learn,vigilv/scikit-learn,bigdataelephants/scikit-learn,Barmaley-exe/scikit-learn,moutai/scikit-learn,glennq/scikit-learn,kashif/scikit-learn,xuewei4d/scikit-learn,ZENGXH/scikit-learn,wazeerzulfikar/scikit-learn,nelson-liu/scikit-learn,MatthieuBizien/scikit-learn,espg/scikit-learn,henridwyer/scikit-learn,ChanderG/scikit-learn,pnedunuri/scikit-learn,jakirkham/scikit-learn,trungnt13/scikit-learn,eg-zhang/scikit-learn,zihua/scikit-learn,maheshakya/scikit-learn,aminert/scikit-learn,depet/scikit-learn,rohanp/scikit-learn,bikong2/scikit-learn,herilalaina/scikit-learn,ClimbsRocks/scikit-learn,0asa/scikit-learn,anntzer/scikit-learn,wanggang3333/scikit-learn,Fireblend/scikit-learn,MohammedWasim/scikit-learn,sumspr/scikit-learn,dsquareindia/scikit-learn,shangwuhencc/scikit-learn,nesterione/scikit-learn,fzalkow/scikit-learn,mattilyra/scikit-learn,jzt5132/scikit-learn,ivannz/scikit-learn,JeanKossaifi/scikit-learn,jorge2703/scikit-learn,OshynSong/scikit-learn,cdegroc/scikit-learn,Nyker510/scikit-learn,kmike/scikit-learn,bhargav/scikit-learn,aetilley/scikit-learn,mwv/scikit-learn,treycausey/scikit-learn,vinayak-mehta/scikit-learn,PatrickOReilly/scikit-learn,ashhher3/scikit-learn,iismd17/scikit-learn,loli/semisupervisedforests,jjx02230808/project0223,jmetzen/scikit-learn,nikitasingh981/sciki
t-learn,poryfly/scikit-learn,gotomypc/scikit-learn,Achuth17/scikit-learn,loli/semisupervisedforests,kevin-intel/scikit-learn,hugobowne/scikit-learn,kjung/scikit-learn,wlamond/scikit-learn,roxyboy/scikit-learn,ky822/scikit-learn,jpautom/scikit-learn,adamgreenhall/scikit-learn,robin-lai/scikit-learn,Akshay0724/scikit-learn,nikitasingh981/scikit-learn,rahuldhote/scikit-learn,yunfeilu/scikit-learn,mehdidc/scikit-learn,saiwing-yeung/scikit-learn,icdishb/scikit-learn,mehdidc/scikit-learn,cl4rke/scikit-learn,IshankGulati/scikit-learn,ephes/scikit-learn,ssaeger/scikit-learn,rahul-c1/scikit-learn,ldirer/scikit-learn,tosolveit/scikit-learn,ndingwall/scikit-learn,andrewnc/scikit-learn,adamgreenhall/scikit-learn,hlin117/scikit-learn,JeanKossaifi/scikit-learn,huobaowangxi/scikit-learn,cl4rke/scikit-learn,aetilley/scikit-learn,larsmans/scikit-learn,glennq/scikit-learn,voxlol/scikit-learn,CforED/Machine-Learning,ankurankan/scikit-learn,icdishb/scikit-learn,pratapvardhan/scikit-learn,ldirer/scikit-learn,devanshdalal/scikit-learn,treycausey/scikit-learn,kylerbrown/scikit-learn,ChanChiChoi/scikit-learn,Myasuka/scikit-learn,AlexandreAbraham/scikit-learn,murali-munna/scikit-learn,Adai0808/scikit-learn,tomlof/scikit-learn,xubenben/scikit-learn,fyffyt/scikit-learn,iismd17/scikit-learn,appapantula/scikit-learn,ilo10/scikit-learn,zorojean/scikit-learn,russel1237/scikit-learn,betatim/scikit-learn,potash/scikit-learn,rexshihaoren/scikit-learn,lazywei/scikit-learn,theoryno3/scikit-learn,jpautom/scikit-learn,wazeerzulfikar/scikit-learn,dingocuster/scikit-learn,joernhees/scikit-learn,wanggang3333/scikit-learn,ogrisel/scikit-learn,RayMick/scikit-learn,joshloyal/scikit-learn,mojoboss/scikit-learn,pnedunuri/scikit-learn,hsiaoyi0504/scikit-learn,ZENGXH/scikit-learn,3manuek/scikit-learn,UNR-AERIAL/scikit-learn,yonglehou/scikit-learn,zhenv5/scikit-learn,roxyboy/scikit-learn,vivekmishra1991/scikit-learn,sumspr/scikit-learn,eickenberg/scikit-learn,JPFrancoia/scikit-learn,mlyundin/scikit-learn,joshloyal/scikit-learn,idlead/scikit-learn,466152112/scikit-learn,mojoboss/scikit-learn,vinayak-mehta/scikit-learn,bnaul/scikit-learn,rrohan/scikit-learn,Titan-C/scikit-learn,altairpearl/scikit-learn,wzbozon/scikit-learn,lin-credible/scikit-learn,terkkila/scikit-learn,gclenaghan/scikit-learn,rvraghav93/scikit-learn,costypetrisor/scikit-learn,r-mart/scikit-learn,sonnyhu/scikit-learn,nvoron23/scikit-learn,spallavolu/scikit-learn,HolgerPeters/scikit-learn,cainiaocome/scikit-learn,vshtanko/scikit-learn,fabioticconi/scikit-learn,Myasuka/scikit-learn,kjung/scikit-learn,yanlend/scikit-learn,h2educ/scikit-learn,vortex-ape/scikit-learn,maheshakya/scikit-learn,Djabbz/scikit-learn,kylerbrown/scikit-learn,voxlol/scikit-learn,treycausey/scikit-learn,BiaDarkia/scikit-learn,rishikksh20/scikit-learn,xyguo/scikit-learn,billy-inn/scikit-learn,costypetrisor/scikit-learn,jmschrei/scikit-learn,equialgo/scikit-learn,scikit-learn/scikit-learn,ankurankan/scikit-learn,treycausey/scikit-learn,bikong2/scikit-learn,meduz/scikit-learn,jseabold/scikit-learn,fengzhyuan/scikit-learn,kevin-intel/scikit-learn,mugizico/scikit-learn,djgagne/scikit-learn,nelson-liu/scikit-learn,nvoron23/scikit-learn,spallavolu/scikit-learn,heli522/scikit-learn,hsuantien/scikit-learn,djgagne/scikit-learn,NelisVerhoef/scikit-learn,MechCoder/scikit-learn,ogrisel/scikit-learn,rajat1994/scikit-learn,cdegroc/scikit-learn,NelisVerhoef/scikit-learn,UNR-AERIAL/scikit-learn,plissonf/scikit-learn,pypot/scikit-learn,davidgbe/scikit-learn,sgenoud/scikit-learn,elkingtonmcb/scikit-learn,BiaDarkia/scikit-le
arn,idlead/scikit-learn,RPGOne/scikit-learn,cainiaocome/scikit-learn,ashhher3/scikit-learn,gotomypc/scikit-learn,chrsrds/scikit-learn,ZenDevelopmentSystems/scikit-learn,chrsrds/scikit-learn,mjgrav2001/scikit-learn,massmutual/scikit-learn,bigdataelephants/scikit-learn,manashmndl/scikit-learn,maheshakya/scikit-learn,LiaoPan/scikit-learn,yask123/scikit-learn,mjgrav2001/scikit-learn,mehdidc/scikit-learn,sinhrks/scikit-learn,0asa/scikit-learn,carrillo/scikit-learn,alvarofierroclavero/scikit-learn,lbishal/scikit-learn,samuel1208/scikit-learn,loli/sklearn-ensembletrees,MartinDelzant/scikit-learn,ClimbsRocks/scikit-learn,justincassidy/scikit-learn,zorojean/scikit-learn,ashhher3/scikit-learn,jpautom/scikit-learn,mfjb/scikit-learn,jaidevd/scikit-learn,Sentient07/scikit-learn,lin-credible/scikit-learn,kmike/scikit-learn,NunoEdgarGub1/scikit-learn,shahankhatch/scikit-learn,RachitKansal/scikit-learn,deepesch/scikit-learn,sinhrks/scikit-learn,wzbozon/scikit-learn,AlexandreAbraham/scikit-learn,r-mart/scikit-learn,lenovor/scikit-learn,nvoron23/scikit-learn,dsullivan7/scikit-learn,anntzer/scikit-learn,mblondel/scikit-learn,liyu1990/sklearn,olologin/scikit-learn,shahankhatch/scikit-learn,CVML/scikit-learn,aewhatley/scikit-learn,mugizico/scikit-learn,nelson-liu/scikit-learn,BiaDarkia/scikit-learn,aminert/scikit-learn,jorge2703/scikit-learn,PatrickChrist/scikit-learn,fabianp/scikit-learn,untom/scikit-learn,DonBeo/scikit-learn,mattgiguere/scikit-learn,q1ang/scikit-learn,ilyes14/scikit-learn,bthirion/scikit-learn,anirudhjayaraman/scikit-learn,clemkoa/scikit-learn,aflaxman/scikit-learn,harshaneelhg/scikit-learn,andrewnc/scikit-learn,alexsavio/scikit-learn,vivekmishra1991/scikit-learn,hdmetor/scikit-learn,ngoix/OCRF,zorroblue/scikit-learn,JosmanPS/scikit-learn,hrjn/scikit-learn,belltailjp/scikit-learn,hdmetor/scikit-learn,h2educ/scikit-learn,devanshdalal/scikit-learn,belltailjp/scikit-learn,Titan-C/scikit-learn,zaxtax/scikit-learn,h2educ/scikit-learn,OshynSong/scikit-learn,ominux/scikit-learn,zihua/scikit-learn,cwu2011/scikit-learn,0x0all/scikit-learn,carrillo/scikit-learn,zuku1985/scikit-learn,RPGOne/scikit-learn,JPFrancoia/scikit-learn,ChanderG/scikit-learn,jorik041/scikit-learn,frank-tancf/scikit-learn,shyamalschandra/scikit-learn,olologin/scikit-learn,liyu1990/sklearn,plissonf/scikit-learn,AlexanderFabisch/scikit-learn,alvarofierroclavero/scikit-learn,lesteve/scikit-learn,vybstat/scikit-learn,quheng/scikit-learn,petosegan/scikit-learn,PrashntS/scikit-learn,ilo10/scikit-learn,ElDeveloper/scikit-learn,mblondel/scikit-learn,dsullivan7/scikit-learn,f3r/scikit-learn,ishanic/scikit-learn,andrewnc/scikit-learn,B3AU/waveTree,CforED/Machine-Learning,cybernet14/scikit-learn,kjung/scikit-learn,shahankhatch/scikit-learn,fyffyt/scikit-learn,0asa/scikit-learn,madjelan/scikit-learn,ClimbsRocks/scikit-learn,petosegan/scikit-learn,jakobworldpeace/scikit-learn,pv/scikit-learn,liberatorqjw/scikit-learn,jereze/scikit-learn,rajat1994/scikit-learn,vigilv/scikit-learn,etkirsch/scikit-learn,ahoyosid/scikit-learn,r-mart/scikit-learn,Djabbz/scikit-learn,abhishekgahlot/scikit-learn,rahuldhote/scikit-learn,luo66/scikit-learn,ElDeveloper/scikit-learn,ephes/scikit-learn,AlexRobson/scikit-learn,loli/sklearn-ensembletrees,cl4rke/scikit-learn,jpautom/scikit-learn,jlegendary/scikit-learn,ngoix/OCRF,anurag313/scikit-learn,imaculate/scikit-learn,mhdella/scikit-learn,cybernet14/scikit-learn,arjoly/scikit-learn,chrsrds/scikit-learn,eg-zhang/scikit-learn,yyjiang/scikit-learn,Garrett-R/scikit-learn,ahoyosid/scikit-learn,abimannans/scikit-learn,fabio
ticconi/scikit-learn,hsiaoyi0504/scikit-learn,betatim/scikit-learn,akionakamura/scikit-learn,jakobworldpeace/scikit-learn,moutai/scikit-learn,nvoron23/scikit-learn,PatrickChrist/scikit-learn,joernhees/scikit-learn,loli/sklearn-ensembletrees,andrewnc/scikit-learn,Clyde-fare/scikit-learn,robbymeals/scikit-learn,mfjb/scikit-learn,xuewei4d/scikit-learn,mwv/scikit-learn,hitszxp/scikit-learn,wanggang3333/scikit-learn,dhruv13J/scikit-learn,evgchz/scikit-learn,akionakamura/scikit-learn,mwv/scikit-learn,sumspr/scikit-learn,beepee14/scikit-learn,jzt5132/scikit-learn,bhargav/scikit-learn,gclenaghan/scikit-learn,arahuja/scikit-learn,eickenberg/scikit-learn,jblackburne/scikit-learn,adamgreenhall/scikit-learn,ycaihua/scikit-learn,abimannans/scikit-learn,jakobworldpeace/scikit-learn,beepee14/scikit-learn,ndingwall/scikit-learn,xiaoxiamii/scikit-learn,HolgerPeters/scikit-learn,mayblue9/scikit-learn,shikhardb/scikit-learn,0x0all/scikit-learn,mblondel/scikit-learn,Obus/scikit-learn,anntzer/scikit-learn,mehdidc/scikit-learn,ky822/scikit-learn,herilalaina/scikit-learn,madjelan/scikit-learn,Obus/scikit-learn,pompiduskus/scikit-learn,466152112/scikit-learn,themrmax/scikit-learn,florian-f/sklearn,PrashntS/scikit-learn,kylerbrown/scikit-learn,deepesch/scikit-learn,r-mart/scikit-learn,alexeyum/scikit-learn,anurag313/scikit-learn,rrohan/scikit-learn,mattgiguere/scikit-learn,Barmaley-exe/scikit-learn,hugobowne/scikit-learn,abhishekgahlot/scikit-learn,sonnyhu/scikit-learn,ningchi/scikit-learn,rohanp/scikit-learn,xyguo/scikit-learn,tdhopper/scikit-learn,tomlof/scikit-learn,zihua/scikit-learn,mlyundin/scikit-learn,hugobowne/scikit-learn,jaidevd/scikit-learn,JsNoNo/scikit-learn,anirudhjayaraman/scikit-learn,kashif/scikit-learn,loli/sklearn-ensembletrees,RomainBrault/scikit-learn,mrshu/scikit-learn,nesterione/scikit-learn,Adai0808/scikit-learn,LiaoPan/scikit-learn,belltailjp/scikit-learn,arjoly/scikit-learn,victorbergelin/scikit-learn,TomDLT/scikit-learn,IssamLaradji/scikit-learn,vibhorag/scikit-learn,aminert/scikit-learn,depet/scikit-learn,arabenjamin/scikit-learn,mhdella/scikit-learn,pkruskal/scikit-learn,luo66/scikit-learn,ltiao/scikit-learn,AnasGhrab/scikit-learn,jjx02230808/project0223,fengzhyuan/scikit-learn,mjudsp/Tsallis,frank-tancf/scikit-learn,thilbern/scikit-learn,jlegendary/scikit-learn,nomadcube/scikit-learn,shikhardb/scikit-learn,liangz0707/scikit-learn,phdowling/scikit-learn,nikitasingh981/scikit-learn,ningchi/scikit-learn,hainm/scikit-learn,xyguo/scikit-learn,anntzer/scikit-learn,larsmans/scikit-learn,NunoEdgarGub1/scikit-learn,appapantula/scikit-learn,mattilyra/scikit-learn,B3AU/waveTree,xavierwu/scikit-learn,madjelan/scikit-learn,glennq/scikit-learn,toastedcornflakes/scikit-learn,Srisai85/scikit-learn,yanlend/scikit-learn,lucidfrontier45/scikit-learn,fyffyt/scikit-learn,icdishb/scikit-learn,glouppe/scikit-learn,kylerbrown/scikit-learn,chrisburr/scikit-learn,rvraghav93/scikit-learn,fbagirov/scikit-learn,tmhm/scikit-learn,rajat1994/scikit-learn,0x0all/scikit-learn,vshtanko/scikit-learn,nhejazi/scikit-learn,aabadie/scikit-learn,nomadcube/scikit-learn,spallavolu/scikit-learn,sarahgrogan/scikit-learn,amueller/scikit-learn,xubenben/scikit-learn,heli522/scikit-learn,michigraber/scikit-learn,xavierwu/scikit-learn,ky822/scikit-learn,ominux/scikit-learn,IshankGulati/scikit-learn,walterreade/scikit-learn,JsNoNo/scikit-learn,appapantula/scikit-learn,DonBeo/scikit-learn,potash/scikit-learn,pratapvardhan/scikit-learn,pnedunuri/scikit-learn,jaidevd/scikit-learn,huzq/scikit-learn,trankmichael/scikit-learn,btabibian/scik
it-learn,pianomania/scikit-learn,nrhine1/scikit-learn,qifeigit/scikit-learn,yunfeilu/scikit-learn,etkirsch/scikit-learn,krez13/scikit-learn,IshankGulati/scikit-learn,henrykironde/scikit-learn,fabianp/scikit-learn,shangwuhencc/scikit-learn,fengzhyuan/scikit-learn,clemkoa/scikit-learn,belltailjp/scikit-learn,procoder317/scikit-learn,imaculate/scikit-learn,potash/scikit-learn,hainm/scikit-learn,IndraVikas/scikit-learn,Windy-Ground/scikit-learn,potash/scikit-learn,saiwing-yeung/scikit-learn,466152112/scikit-learn,deepesch/scikit-learn,cybernet14/scikit-learn,ycaihua/scikit-learn,chrisburr/scikit-learn,russel1237/scikit-learn,abhishekkrthakur/scikit-learn,raghavrv/scikit-learn,procoder317/scikit-learn,joshloyal/scikit-learn,arabenjamin/scikit-learn,cybernet14/scikit-learn,mxjl620/scikit-learn,joshloyal/scikit-learn,luo66/scikit-learn,0x0all/scikit-learn,pypot/scikit-learn,hlin117/scikit-learn,giorgiop/scikit-learn,vortex-ape/scikit-learn,MatthieuBizien/scikit-learn,herilalaina/scikit-learn,aetilley/scikit-learn,Akshay0724/scikit-learn,sinhrks/scikit-learn,liyu1990/sklearn,schets/scikit-learn,gclenaghan/scikit-learn,harshaneelhg/scikit-learn,Windy-Ground/scikit-learn,zuku1985/scikit-learn,billy-inn/scikit-learn,pythonvietnam/scikit-learn,meduz/scikit-learn,dsullivan7/scikit-learn,schets/scikit-learn,lazywei/scikit-learn,dsullivan7/scikit-learn,alexsavio/scikit-learn,qifeigit/scikit-learn,IndraVikas/scikit-learn,ilo10/scikit-learn,mattilyra/scikit-learn,B3AU/waveTree,alvarofierroclavero/scikit-learn,thientu/scikit-learn,vortex-ape/scikit-learn,jm-begon/scikit-learn,sergeyf/scikit-learn,deepesch/scikit-learn,CVML/scikit-learn,chrisburr/scikit-learn,466152112/scikit-learn,pompiduskus/scikit-learn,pythonvietnam/scikit-learn,florian-f/sklearn,JeanKossaifi/scikit-learn,jereze/scikit-learn,ndingwall/scikit-learn,JosmanPS/scikit-learn,AlexRobson/scikit-learn,abhishekgahlot/scikit-learn,henrykironde/scikit-learn,aabadie/scikit-learn,jm-begon/scikit-learn,glouppe/scikit-learn,AlexRobson/scikit-learn,dingocuster/scikit-learn,AnasGhrab/scikit-learn,terkkila/scikit-learn,xwolf12/scikit-learn,samuel1208/scikit-learn,simon-pepin/scikit-learn,bnaul/scikit-learn,pythonvietnam/scikit-learn,kaichogami/scikit-learn,bigdataelephants/scikit-learn,alexsavio/scikit-learn,mjudsp/Tsallis,tdhopper/scikit-learn,rohanp/scikit-learn,trankmichael/scikit-learn,xzh86/scikit-learn,costypetrisor/scikit-learn,jseabold/scikit-learn,mjgrav2001/scikit-learn,loli/semisupervisedforests,ngoix/OCRF,sgenoud/scikit-learn,jorik041/scikit-learn,lesteve/scikit-learn,depet/scikit-learn,zaxtax/scikit-learn,pkruskal/scikit-learn,jseabold/scikit-learn,loli/sklearn-ensembletrees,JPFrancoia/scikit-learn,OshynSong/scikit-learn,hsuantien/scikit-learn,jayflo/scikit-learn,arabenjamin/scikit-learn,beepee14/scikit-learn,hrjn/scikit-learn,aflaxman/scikit-learn,hitszxp/scikit-learn,MartinSavc/scikit-learn,bnaul/scikit-learn,harshaneelhg/scikit-learn,RayMick/scikit-learn,amueller/scikit-learn,Djabbz/scikit-learn,hrjn/scikit-learn,Jimmy-Morzaria/scikit-learn,Adai0808/scikit-learn,untom/scikit-learn,Srisai85/scikit-learn,Adai0808/scikit-learn,NelisVerhoef/scikit-learn,Fireblend/scikit-learn,nesterione/scikit-learn,tmhm/scikit-learn,tawsifkhan/scikit-learn,depet/scikit-learn,toastedcornflakes/scikit-learn,pv/scikit-learn,dsquareindia/scikit-learn,IshankGulati/scikit-learn,shikhardb/scikit-learn,mugizico/scikit-learn,3manuek/scikit-learn,mugizico/scikit-learn,massmutual/scikit-learn,jmschrei/scikit-learn,xavierwu/scikit-learn,ZENGXH/scikit-learn,xubenben/scikit-
learn,PatrickOReilly/scikit-learn,Jimmy-Morzaria/scikit-learn,Achuth17/scikit-learn,alexeyum/scikit-learn,jjx02230808/project0223,carrillo/scikit-learn,jaidevd/scikit-learn,jm-begon/scikit-learn,manashmndl/scikit-learn,robbymeals/scikit-learn,bthirion/scikit-learn,kagayakidan/scikit-learn,eickenberg/scikit-learn,zhenv5/scikit-learn,manhhomienbienthuy/scikit-learn,theoryno3/scikit-learn,schets/scikit-learn,kagayakidan/scikit-learn,AlexRobson/scikit-learn,procoder317/scikit-learn,lbishal/scikit-learn,ankurankan/scikit-learn,trungnt13/scikit-learn,TomDLT/scikit-learn,ahoyosid/scikit-learn,beepee14/scikit-learn,ky822/scikit-learn,poryfly/scikit-learn,Achuth17/scikit-learn,ishanic/scikit-learn,Nyker510/scikit-learn,larsmans/scikit-learn,YinongLong/scikit-learn,rexshihaoren/scikit-learn,lazywei/scikit-learn,sanketloke/scikit-learn,arjoly/scikit-learn,Myasuka/scikit-learn,bhargav/scikit-learn,Lawrence-Liu/scikit-learn,dhruv13J/scikit-learn,harshaneelhg/scikit-learn,glemaitre/scikit-learn,evgchz/scikit-learn,Titan-C/scikit-learn,vybstat/scikit-learn,khkaminska/scikit-learn,0asa/scikit-learn,thientu/scikit-learn,rsivapr/scikit-learn,ChanChiChoi/scikit-learn,arabenjamin/scikit-learn,RayMick/scikit-learn,huzq/scikit-learn,pv/scikit-learn,vybstat/scikit-learn,lenovor/scikit-learn,fzalkow/scikit-learn,jlegendary/scikit-learn,rishikksh20/scikit-learn,idlead/scikit-learn,rrohan/scikit-learn,giorgiop/scikit-learn,thilbern/scikit-learn,JsNoNo/scikit-learn,amueller/scikit-learn,procoder317/scikit-learn,rishikksh20/scikit-learn,Aasmi/scikit-learn,LohithBlaze/scikit-learn,andaag/scikit-learn,nmayorov/scikit-learn,nikitasingh981/scikit-learn,cwu2011/scikit-learn,q1ang/scikit-learn,petosegan/scikit-learn,shikhardb/scikit-learn,sergeyf/scikit-learn,sarahgrogan/scikit-learn,HolgerPeters/scikit-learn,ivannz/scikit-learn,mxjl620/scikit-learn,mjudsp/Tsallis,florian-f/sklearn,jzt5132/scikit-learn,russel1237/scikit-learn,ycaihua/scikit-learn,rohanp/scikit-learn,rishikksh20/scikit-learn,AIML/scikit-learn,IndraVikas/scikit-learn,cainiaocome/scikit-learn,carrillo/scikit-learn,murali-munna/scikit-learn,wanggang3333/scikit-learn,lesteve/scikit-learn,vivekmishra1991/scikit-learn,zorojean/scikit-learn,imaculate/scikit-learn,giorgiop/scikit-learn,samzhang111/scikit-learn,mhue/scikit-learn,terkkila/scikit-learn,nmayorov/scikit-learn,scikit-learn/scikit-learn,AIML/scikit-learn,tdhopper/scikit-learn,jkarnows/scikit-learn,massmutual/scikit-learn,mhue/scikit-learn,pkruskal/scikit-learn,mikebenfield/scikit-learn,cainiaocome/scikit-learn,AlexanderFabisch/scikit-learn,bhargav/scikit-learn,waterponey/scikit-learn,qifeigit/scikit-learn,florian-f/sklearn,toastedcornflakes/scikit-learn,aewhatley/scikit-learn,AlexandreAbraham/scikit-learn,nhejazi/scikit-learn,macks22/scikit-learn,Nyker510/scikit-learn,akionakamura/scikit-learn,wlamond/scikit-learn,sanketloke/scikit-learn,eickenberg/scikit-learn,sgenoud/scikit-learn,ssaeger/scikit-learn,huzq/scikit-learn,ycaihua/scikit-learn,hainm/scikit-learn,nrhine1/scikit-learn,poryfly/scikit-learn,icdishb/scikit-learn,espg/scikit-learn,Obus/scikit-learn,fabioticconi/scikit-learn,vshtanko/scikit-learn,RomainBrault/scikit-learn,Obus/scikit-learn,0asa/scikit-learn,mattgiguere/scikit-learn,nelson-liu/scikit-learn,hitszxp/scikit-learn,henrykironde/scikit-learn,mayblue9/scikit-learn,hdmetor/scikit-learn,Lawrence-Liu/scikit-learn,vibhorag/scikit-learn,IndraVikas/scikit-learn,LohithBlaze/scikit-learn,ltiao/scikit-learn,cauchycui/scikit-learn,YinongLong/scikit-learn,bthirion/scikit-learn,xzh86/scikit-learn,nmayo
rov/scikit-learn,PatrickOReilly/scikit-learn,giorgiop/scikit-learn,fabianp/scikit-learn,plissonf/scikit-learn,B3AU/waveTree,evgchz/scikit-learn,shenzebang/scikit-learn,liberatorqjw/scikit-learn,abimannans/scikit-learn,glouppe/scikit-learn,ChanChiChoi/scikit-learn,sinhrks/scikit-learn,glemaitre/scikit-learn,lenovor/scikit-learn,fredhusser/scikit-learn,shahankhatch/scikit-learn,elkingtonmcb/scikit-learn,murali-munna/scikit-learn,xiaoxiamii/scikit-learn,terkkila/scikit-learn,ngoix/OCRF,henrykironde/scikit-learn,samzhang111/scikit-learn,siutanwong/scikit-learn,rsivapr/scikit-learn,lesteve/scikit-learn,ankurankan/scikit-learn,wazeerzulfikar/scikit-learn,petosegan/scikit-learn,yask123/scikit-learn,q1ang/scikit-learn,MatthieuBizien/scikit-learn,RPGOne/scikit-learn,ilo10/scikit-learn,ogrisel/scikit-learn,MartinSavc/scikit-learn,pypot/scikit-learn,vermouthmjl/scikit-learn,kevin-intel/scikit-learn,marcocaccin/scikit-learn,abhishekgahlot/scikit-learn,ZenDevelopmentSystems/scikit-learn,imaculate/scikit-learn,bikong2/scikit-learn,yunfeilu/scikit-learn,vigilv/scikit-learn,bigdataelephants/scikit-learn,khkaminska/scikit-learn,mjgrav2001/scikit-learn,hainm/scikit-learn,glouppe/scikit-learn,xiaoxiamii/scikit-learn,treycausey/scikit-learn,vermouthmjl/scikit-learn,jjx02230808/project0223,tosolveit/scikit-learn,scikit-learn/scikit-learn,ankurankan/scikit-learn,devanshdalal/scikit-learn,trungnt13/scikit-learn,ashhher3/scikit-learn,RomainBrault/scikit-learn,robin-lai/scikit-learn,zorroblue/scikit-learn,rrohan/scikit-learn,rexshihaoren/scikit-learn,shenzebang/scikit-learn,abhishekkrthakur/scikit-learn,fbagirov/scikit-learn,rahuldhote/scikit-learn,pompiduskus/scikit-learn,rsivapr/scikit-learn,pianomania/scikit-learn,ishanic/scikit-learn,3manuek/scikit-learn,espg/scikit-learn,Clyde-fare/scikit-learn,robin-lai/scikit-learn,untom/scikit-learn,IssamLaradji/scikit-learn,sanketloke/scikit-learn,kagayakidan/scikit-learn,Myasuka/scikit-learn,luo66/scikit-learn,AnasGhrab/scikit-learn,waterponey/scikit-learn,Achuth17/scikit-learn,mojoboss/scikit-learn,hsiaoyi0504/scikit-learn,dsquareindia/scikit-learn,heli522/scikit-learn,mhue/scikit-learn,ltiao/scikit-learn,fabioticconi/scikit-learn,depet/scikit-learn,quheng/scikit-learn,PatrickOReilly/scikit-learn,LiaoPan/scikit-learn,sgenoud/scikit-learn,jkarnows/scikit-learn,equialgo/scikit-learn,btabibian/scikit-learn,MechCoder/scikit-learn,lucidfrontier45/scikit-learn,pythonvietnam/scikit-learn,simon-pepin/scikit-learn,fzalkow/scikit-learn,Clyde-fare/scikit-learn,Vimos/scikit-learn,vermouthmjl/scikit-learn,murali-munna/scikit-learn,nmayorov/scikit-learn,smartscheduling/scikit-learn-categorical-tree,victorbergelin/scikit-learn,Djabbz/scikit-learn,marcocaccin/scikit-learn,krez13/scikit-learn,jereze/scikit-learn,liangz0707/scikit-learn,evgchz/scikit-learn,ChanderG/scikit-learn,kashif/scikit-learn,DSLituiev/scikit-learn,MartinDelzant/scikit-learn,yunfeilu/scikit-learn,ldirer/scikit-learn,themrmax/scikit-learn,alexeyum/scikit-learn,bthirion/scikit-learn,pompiduskus/scikit-learn,yonglehou/scikit-learn,olologin/scikit-learn,lucidfrontier45/scikit-learn,samzhang111/scikit-learn,nesterione/scikit-learn,sonnyhu/scikit-learn,Garrett-R/scikit-learn,fabianp/scikit-learn,hdmetor/scikit-learn,andaag/scikit-learn,anurag313/scikit-learn,LiaoPan/scikit-learn,rvraghav93/scikit-learn,schets/scikit-learn,massmutual/scikit-learn,hugobowne/scikit-learn,zorroblue/scikit-learn,shusenl/scikit-learn,HolgerPeters/scikit-learn,abhishekkrthakur/scikit-learn,mayblue9/scikit-learn,ClimbsRocks/scikit-learn,NunoEdgarG
ub1/scikit-learn,kmike/scikit-learn,kevin-intel/scikit-learn,samuel1208/scikit-learn,henridwyer/scikit-learn,mattilyra/scikit-learn,Clyde-fare/scikit-learn,ngoix/OCRF,simon-pepin/scikit-learn,Vimos/scikit-learn,kaichogami/scikit-learn,eg-zhang/scikit-learn,DonBeo/scikit-learn,heli522/scikit-learn,theoryno3/scikit-learn,zhenv5/scikit-learn,hrjn/scikit-learn,altairpearl/scikit-learn,arahuja/scikit-learn,devanshdalal/scikit-learn,jkarnows/scikit-learn,f3r/scikit-learn,davidgbe/scikit-learn,zorroblue/scikit-learn,sarahgrogan/scikit-learn,nomadcube/scikit-learn,alexeyum/scikit-learn,liberatorqjw/scikit-learn,andaag/scikit-learn,stylianos-kampakis/scikit-learn,jblackburne/scikit-learn,meduz/scikit-learn,sergeyf/scikit-learn,yyjiang/scikit-learn,ndingwall/scikit-learn,betatim/scikit-learn,zhenv5/scikit-learn,arahuja/scikit-learn,akionakamura/scikit-learn,costypetrisor/scikit-learn,tawsifkhan/scikit-learn,mlyundin/scikit-learn,AlexandreAbraham/scikit-learn,fengzhyuan/scikit-learn,jakirkham/scikit-learn,plissonf/scikit-learn,DSLituiev/scikit-learn,smartscheduling/scikit-learn-categorical-tree,ivannz/scikit-learn,mojoboss/scikit-learn,thientu/scikit-learn,jorik041/scikit-learn,mrshu/scikit-learn,khkaminska/scikit-learn,CforED/Machine-Learning,jmetzen/scikit-learn,vibhorag/scikit-learn,tdhopper/scikit-learn,MartinDelzant/scikit-learn,mikebenfield/scikit-learn,justincassidy/scikit-learn,raghavrv/scikit-learn,ltiao/scikit-learn,NelisVerhoef/scikit-learn,shangwuhencc/scikit-learn,aabadie/scikit-learn,mrshu/scikit-learn,waterponey/scikit-learn,stylianos-kampakis/scikit-learn,RachitKansal/scikit-learn,Fireblend/scikit-learn,poryfly/scikit-learn,yanlend/scikit-learn,fbagirov/scikit-learn,abhishekkrthakur/scikit-learn,Garrett-R/scikit-learn,wazeerzulfikar/scikit-learn,UNR-AERIAL/scikit-learn,meduz/scikit-learn,ilyes14/scikit-learn,ldirer/scikit-learn,macks22/scikit-learn,stylianos-kampakis/scikit-learn,yanlend/scikit-learn,MohammedWasim/scikit-learn,zaxtax/scikit-learn,mxjl620/scikit-learn,glemaitre/scikit-learn,robin-lai/scikit-learn,vybstat/scikit-learn,jlegendary/scikit-learn,shenzebang/scikit-learn,voxlol/scikit-learn,liangz0707/scikit-learn,moutai/scikit-learn,Nyker510/scikit-learn,ominux/scikit-learn,gotomypc/scikit-learn,PrashntS/scikit-learn,florian-f/sklearn,Windy-Ground/scikit-learn,davidgbe/scikit-learn,themrmax/scikit-learn,dhruv13J/scikit-learn,ningchi/scikit-learn,samzhang111/scikit-learn,madjelan/scikit-learn,TomDLT/scikit-learn,macks22/scikit-learn,tosolveit/scikit-learn,mattgiguere/scikit-learn,arjoly/scikit-learn,espg/scikit-learn,voxlol/scikit-learn,RachitKansal/scikit-learn,MohammedWasim/scikit-learn,huobaowangxi/scikit-learn,dhruv13J/scikit-learn,JPFrancoia/scikit-learn,ZENGXH/scikit-learn,abimannans/scikit-learn,roxyboy/scikit-learn,ElDeveloper/scikit-learn,mxjl620/scikit-learn,wlamond/scikit-learn,RPGOne/scikit-learn,vivekmishra1991/scikit-learn,Sentient07/scikit-learn,glemaitre/scikit-learn,ChanChiChoi/scikit-learn,nhejazi/scikit-learn,etkirsch/scikit-learn,AIML/scikit-learn,krez13/scikit-learn,untom/scikit-learn,huzq/scikit-learn,jmetzen/scikit-learn,dingocuster/scikit-learn,vigilv/scikit-learn,abhishekgahlot/scikit-learn,Jimmy-Morzaria/scikit-learn,shyamalschandra/scikit-learn,Titan-C/scikit-learn,ahoyosid/scikit-learn,Vimos/scikit-learn,PatrickChrist/scikit-learn,MartinSavc/scikit-learn,B3AU/waveTree,herilalaina/scikit-learn,walterreade/scikit-learn,wzbozon/scikit-learn,xyguo/scikit-learn,kjung/scikit-learn,altairpearl/scikit-learn,raghavrv/scikit-learn,Fireblend/scikit-learn,yongl
ehou/scikit-learn,russel1237/scikit-learn,hsuantien/scikit-learn,Srisai85/scikit-learn,glennq/scikit-learn,siutanwong/scikit-learn,fyffyt/scikit-learn,DSLituiev/scikit-learn,lucidfrontier45/scikit-learn,gotomypc/scikit-learn,phdowling/scikit-learn,aabadie/scikit-learn,Lawrence-Liu/scikit-learn,pv/scikit-learn,mattilyra/scikit-learn,victorbergelin/scikit-learn,vshtanko/scikit-learn,zihua/scikit-learn,DSLituiev/scikit-learn,cauchycui/scikit-learn,xuewei4d/scikit-learn,UNR-AERIAL/scikit-learn,xiaoxiamii/scikit-learn,rahul-c1/scikit-learn,ominux/scikit-learn,xzh86/scikit-learn,joernhees/scikit-learn,ssaeger/scikit-learn,jorge2703/scikit-learn,vibhorag/scikit-learn,elkingtonmcb/scikit-learn,rvraghav93/scikit-learn,henridwyer/scikit-learn,Akshay0724/scikit-learn,macks22/scikit-learn,h2educ/scikit-learn,justincassidy/scikit-learn,mikebenfield/scikit-learn,jseabold/scikit-learn,cl4rke/scikit-learn,quheng/scikit-learn,ningchi/scikit-learn,etkirsch/scikit-learn,robbymeals/scikit-learn,liyu1990/sklearn,lin-credible/scikit-learn,lin-credible/scikit-learn,xavierwu/scikit-learn,cdegroc/scikit-learn,kaichogami/scikit-learn,michigraber/scikit-learn,hsiaoyi0504/scikit-learn,jayflo/scikit-learn,yonglehou/scikit-learn,xzh86/scikit-learn,pypot/scikit-learn,jakirkham/scikit-learn,jzt5132/scikit-learn,ogrisel/scikit-learn,betatim/scikit-learn,marcocaccin/scikit-learn,shyamalschandra/scikit-learn,ivannz/scikit-learn,appapantula/scikit-learn,BiaDarkia/scikit-learn,lbishal/scikit-learn,xubenben/scikit-learn,Garrett-R/scikit-learn,mlyundin/scikit-learn,vinayak-mehta/scikit-learn,rexshihaoren/scikit-learn,vinayak-mehta/scikit-learn,btabibian/scikit-learn,rsivapr/scikit-learn,toastedcornflakes/scikit-learn,billy-inn/scikit-learn,OshynSong/scikit-learn,sergeyf/scikit-learn,MartinSavc/scikit-learn,PrashntS/scikit-learn,sarahgrogan/scikit-learn,nrhine1/scikit-learn,olologin/scikit-learn
|
Add another balltree benchmark, this time with plot interface.
git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@598 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
|
from scikits.learn.BallTree import BallTree, knn_brute
import numpy as np
from time import time
from scipy.spatial import cKDTree
import sys
import pylab as pl
def compare_nbrs(nbrs1,nbrs2):
assert nbrs1.shape == nbrs2.shape
if(nbrs1.ndim == 2):
N,k = nbrs1.shape
for i in range(N):
for j in range(k):
if nbrs1[i,j]==i:
continue
elif nbrs1[i,j] not in nbrs2[i]:
return False
return True
elif(nbrs1.ndim == 1):
N = len(nbrs1)
        return np.all(nbrs1 == nbrs2)
N = 1000
ls = 1 # leaf size
k = 20
BT_results = []
KDT_results = []
for i in range(1, 10):
print 'Iteration %s' %i
D = i*100
M = np.random.random([N, D])
t0 = time()
BT = BallTree(M, ls)
d, nbrs1 = BT.query(M, k)
delta = time() - t0
BT_results.append(delta)
t0 = time()
KDT = cKDTree(M, ls)
d, nbrs2 = KDT.query(M, k)
delta = time() - t0
KDT_results.append(delta)
# this checks we get the correct result
assert compare_nbrs(nbrs1,nbrs2)
xx = 100*np.arange(1, 10)
pl.plot(xx, BT_results, label='scikits.learn (BallTree)')
pl.plot(xx, KDT_results, label='scipy (cKDTree)')
pl.xlabel('number of dimensions')
pl.ylabel('time (seconds)')
pl.legend()
pl.show()
|
<commit_before><commit_msg>Add another balltree benchmark, this time with plot interface.
git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@598 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8<commit_after>
|
from scikits.learn.BallTree import BallTree, knn_brute
import numpy as np
from time import time
from scipy.spatial import cKDTree
import sys
import pylab as pl
def compare_nbrs(nbrs1,nbrs2):
assert nbrs1.shape == nbrs2.shape
if(nbrs1.ndim == 2):
N,k = nbrs1.shape
for i in range(N):
for j in range(k):
if nbrs1[i,j]==i:
continue
elif nbrs1[i,j] not in nbrs2[i]:
return False
return True
elif(nbrs1.ndim == 1):
N = len(nbrs1)
        return np.all(nbrs1 == nbrs2)
N = 1000
ls = 1 # leaf size
k = 20
BT_results = []
KDT_results = []
for i in range(1, 10):
print 'Iteration %s' %i
D = i*100
M = np.random.random([N, D])
t0 = time()
BT = BallTree(M, ls)
d, nbrs1 = BT.query(M, k)
delta = time() - t0
BT_results.append(delta)
t0 = time()
KDT = cKDTree(M, ls)
d, nbrs2 = KDT.query(M, k)
delta = time() - t0
KDT_results.append(delta)
# this checks we get the correct result
assert compare_nbrs(nbrs1,nbrs2)
xx = 100*np.arange(1, 10)
pl.plot(xx, BT_results, label='scikits.learn (BallTree)')
pl.plot(xx, KDT_results, label='scipy (cKDTree)')
pl.xlabel('number of dimensions')
pl.ylabel('time (seconds)')
pl.legend()
pl.show()
|
Add another balltree benchmark, this time with plot interface.
git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@598 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
from scikits.learn.BallTree import BallTree, knn_brute
import numpy as np
from time import time
from scipy.spatial import cKDTree
import sys
import pylab as pl
def compare_nbrs(nbrs1,nbrs2):
assert nbrs1.shape == nbrs2.shape
if(nbrs1.ndim == 2):
N,k = nbrs1.shape
for i in range(N):
for j in range(k):
if nbrs1[i,j]==i:
continue
elif nbrs1[i,j] not in nbrs2[i]:
return False
return True
elif(nbrs1.ndim == 1):
N = len(nbrs1)
        return np.all(nbrs1 == nbrs2)
N = 1000
ls = 1 # leaf size
k = 20
BT_results = []
KDT_results = []
for i in range(1, 10):
print 'Iteration %s' %i
D = i*100
M = np.random.random([N, D])
t0 = time()
BT = BallTree(M, ls)
d, nbrs1 = BT.query(M, k)
delta = time() - t0
BT_results.append(delta)
t0 = time()
KDT = cKDTree(M, ls)
d, nbrs2 = KDT.query(M, k)
delta = time() - t0
KDT_results.append(delta)
# this checks we get the correct result
assert compare_nbrs(nbrs1,nbrs2)
xx = 100*np.arange(1, 10)
pl.plot(xx, BT_results, label='scikits.learn (BallTree)')
pl.plot(xx, KDT_results, label='scipy (cKDTree)')
pl.xlabel('number of dimensions')
pl.ylabel('time (seconds)')
pl.legend()
pl.show()
|
<commit_before><commit_msg>Add another balltree benchmark, this time with plot interface.
git-svn-id: a2d1b0e147e530765aaf3e1662d4a98e2f63c719@598 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8<commit_after>
from scikits.learn.BallTree import BallTree, knn_brute
import numpy as np
from time import time
from scipy.spatial import cKDTree
import sys
import pylab as pl
def compare_nbrs(nbrs1,nbrs2):
assert nbrs1.shape == nbrs2.shape
if(nbrs1.ndim == 2):
N,k = nbrs1.shape
for i in range(N):
for j in range(k):
if nbrs1[i,j]==i:
continue
elif nbrs1[i,j] not in nbrs2[i]:
return False
return True
elif(nbrs1.ndim == 1):
N = len(nbrs1)
        return np.all(nbrs1 == nbrs2)
N = 1000
ls = 1 # leaf size
k = 20
BT_results = []
KDT_results = []
for i in range(1, 10):
print 'Iteration %s' %i
D = i*100
M = np.random.random([N, D])
t0 = time()
BT = BallTree(M, ls)
d, nbrs1 = BT.query(M, k)
delta = time() - t0
BT_results.append(delta)
t0 = time()
KDT = cKDTree(M, ls)
d, nbrs2 = KDT.query(M, k)
delta = time() - t0
KDT_results.append(delta)
# this checks we get the correct result
assert compare_nbrs(nbrs1,nbrs2)
xx = 100*np.arange(1, 10)
pl.plot(xx, BT_results, label='scikits.learn (BallTree)')
pl.plot(xx, KDT_results, label='scipy (cKDTree)')
pl.xlabel('number of dimensions')
pl.ylabel('time (seconds)')
pl.legend()
pl.show()
|
|
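A minimal standalone sketch, not from the commit itself, of the order-insensitive neighbor comparison that compare_nbrs performs; the seed and array shapes here are illustrative only:
import numpy as np
rng = np.random.RandomState(0)
nbrs1 = rng.permutation(5).reshape(1, 5)    # hypothetical neighbor indices
nbrs2 = nbrs1[:, ::-1].copy()               # same neighbors, different order
ok = all(set(r1) <= set(r2) for r1, r2 in zip(nbrs1, nbrs2))
print(ok)  # True: the two trees may order equidistant neighbors differently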
ce9434314fb4851e1573270d9b6a07d88d521ca1
|
awx/main/management/commands/tower_version.py
|
awx/main/management/commands/tower_version.py
|
# Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import BaseCommand
from awx import __version__ as tower_version
class Command(BaseCommand):
help = 'Emit the Tower version and exit'
def handle(self, *args, **options):
self.stdout.write(tower_version)
|
Add a metadata file to the tower home directory that lists the version of Tower
|
Add a metadata file to the tower home directory that lists the version
of Tower
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx
|
Add a metadata file to the tower home directory that lists the version
of Tower
|
# Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import BaseCommand
from awx import __version__ as tower_version
class Command(BaseCommand):
help = 'Emit the Tower version and exit'
def handle(self, *args, **options):
self.stdout.write(tower_version)
|
<commit_before><commit_msg>Add a metadata file to the tower home directory that lists the version
of Tower<commit_after>
|
# Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import BaseCommand
from awx import __version__ as tower_version
class Command(BaseCommand):
help = 'Emit the Tower version and exit'
def handle(self, *args, **options):
self.stdout.write(tower_version)
|
Add a metadata file to the tower home directory that lists the version
of Tower# Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import BaseCommand
from awx import __version__ as tower_version
class Command(BaseCommand):
help = 'Emit the Tower version and exit'
def handle(self, *args, **options):
self.stdout.write(tower_version)
|
<commit_before><commit_msg>Add a metadata file to the tower home directory that lists the version
of Tower<commit_after># Copyright (c) 2014 Ansible, Inc.
# All Rights Reserved
from django.core.management.base import BaseCommand
from awx import __version__ as tower_version
class Command(BaseCommand):
help = 'Emit the Tower version and exit'
def handle(self, *args, **options):
self.stdout.write(tower_version)
|
|
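A hedged usage sketch, assuming a configured Django settings module; call_command can capture what the command writes to self.stdout:
from io import StringIO
from django.core.management import call_command

out = StringIO()
call_command('tower_version', stdout=out)   # requires Django settings to be set up
print(out.getvalue().strip())               # e.g. '3.0.0' (hypothetical version)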
53eec0789a13014a65fd3151f17f4dead1ef1684
|
{{cookiecutter.repo_name}}/tests/test_i18n.py
|
{{cookiecutter.repo_name}}/tests/test_i18n.py
|
# -*- coding: utf-8 -*-
def test_switch_language(app):
test_label = app.carousel.slides[1].children[1]
english_text = (
"This app uses Kivy, a Python Framework for NUI Development."
)
assert test_label.text == english_text
app.on_config_change(app.config, 'user_settings', 'language', 'de')
german_text = (
"Diese App benutzt Kivy, ein Python Framework zur NUI Entwicklung."
)
assert test_label.text == german_text
|
Add another test for switching the language of the app
|
Add another test for switching the language of the app
|
Python
|
mit
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
Add another test for switching the language of the app
|
# -*- coding: utf-8 -*-
def test_switch_language(app):
test_label = app.carousel.slides[1].children[1]
english_text = (
"This app uses Kivy, a Python Framework for NUI Development."
)
assert test_label.text == english_text
app.on_config_change(app.config, 'user_settings', 'language', 'de')
german_text = (
"Diese App benutzt Kivy, ein Python Framework zur NUI Entwicklung."
)
assert test_label.text == german_text
|
<commit_before><commit_msg>Add another test for switching the language of the app<commit_after>
|
# -*- coding: utf-8 -*-
def test_switch_language(app):
test_label = app.carousel.slides[1].children[1]
english_text = (
"This app uses Kivy, a Python Framework for NUI Development."
)
assert test_label.text == english_text
app.on_config_change(app.config, 'user_settings', 'language', 'de')
german_text = (
"Diese App benutzt Kivy, ein Python Framework zur NUI Entwicklung."
)
assert test_label.text == german_text
|
Add another test for switching the language of the app# -*- coding: utf-8 -*-
def test_switch_language(app):
test_label = app.carousel.slides[1].children[1]
english_text = (
"This app uses Kivy, a Python Framework for NUI Development."
)
assert test_label.text == english_text
app.on_config_change(app.config, 'user_settings', 'language', 'de')
german_text = (
"Diese App benutzt Kivy, ein Python Framework zur NUI Entwicklung."
)
assert test_label.text == german_text
|
<commit_before><commit_msg>Add another test for switching the language of the app<commit_after># -*- coding: utf-8 -*-
def test_switch_language(app):
test_label = app.carousel.slides[1].children[1]
english_text = (
"This app uses Kivy, a Python Framework for NUI Development."
)
assert test_label.text == english_text
app.on_config_change(app.config, 'user_settings', 'language', 'de')
german_text = (
"Diese App benutzt Kivy, ein Python Framework zur NUI Entwicklung."
)
assert test_label.text == german_text
|
|
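A hedged sketch of the Kivy hook this test drives; the class and helper names are hypothetical, and only the on_config_change signature comes from Kivy:
from kivy.app import App

class DemoApp(App):                          # hypothetical app, requires Kivy
    def on_config_change(self, config, section, key, value):
        # Kivy invokes this whenever a settings panel value changes.
        if section == 'user_settings' and key == 'language':
            self.switch_lang(value)          # hypothetical label-rebinding helper

    def switch_lang(self, lang):
        print('switching UI language to', lang)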
07289743a6324b76c94b423720d5b12944078d81
|
tests/test_irc_formatter.py
|
tests/test_irc_formatter.py
|
# -*- coding: utf-8 -*-
"""
    Pygments IRC formatter tests
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import io
import os
import re
import unittest
import tempfile
from os.path import join, dirname, isfile
from pygments.util import StringIO
from pygments.lexers import PythonLexer
from pygments.formatters import IRCFormatter
import support
tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = IRCFormatter()
houtfile = StringIO()
hfmt.format(tokensource, houtfile)
self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
|
Add basic test for irc formatter
|
Add basic test for irc formatter
|
Python
|
bsd-2-clause
|
aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments
|
Add basic test for irc formatter
|
# -*- coding: utf-8 -*-
"""
    Pygments IRC formatter tests
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import io
import os
import re
import unittest
import tempfile
from os.path import join, dirname, isfile
from pygments.util import StringIO
from pygments.lexers import PythonLexer
from pygments.formatters import IRCFormatter
import support
tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = IRCFormatter()
houtfile = StringIO()
hfmt.format(tokensource, houtfile)
self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
|
<commit_before><commit_msg>Add basic test for irc formatter<commit_after>
|
# -*- coding: utf-8 -*-
"""
    Pygments IRC formatter tests
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import io
import os
import re
import unittest
import tempfile
from os.path import join, dirname, isfile
from pygments.util import StringIO
from pygments.lexers import PythonLexer
from pygments.formatters import IRCFormatter
import support
tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = IRCFormatter()
houtfile = StringIO()
hfmt.format(tokensource, houtfile)
self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
|
Add basic test for irc formatter# -*- coding: utf-8 -*-
"""
    Pygments IRC formatter tests
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import io
import os
import re
import unittest
import tempfile
from os.path import join, dirname, isfile
from pygments.util import StringIO
from pygments.lexers import PythonLexer
from pygments.formatters import IRCFormatter
import support
tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = IRCFormatter()
houtfile = StringIO()
hfmt.format(tokensource, houtfile)
self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
|
<commit_before><commit_msg>Add basic test for irc formatter<commit_after># -*- coding: utf-8 -*-
"""
    Pygments IRC formatter tests
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import io
import os
import re
import unittest
import tempfile
from os.path import join, dirname, isfile
from pygments.util import StringIO
from pygments.lexers import PythonLexer
from pygments.formatters import IRCFormatter
import support
tokensource = list(PythonLexer().get_tokens("lambda x: 123"))
class HtmlFormatterTest(unittest.TestCase):
def test_correct_output(self):
hfmt = IRCFormatter()
houtfile = StringIO()
hfmt.format(tokensource, houtfile)
self.assertEqual(u'\x0302lambda\x03 x: \x0302123\x03\n', houtfile.getvalue())
|
|
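The expected value uses mIRC-style control codes: \x03 followed by a two-digit number opens a color (02 is conventionally blue) and a bare \x03 closes it. A small sketch, with an assumed regex, that strips the codes back off:
import re
colored = u'\x0302lambda\x03 x: \x0302123\x03\n'
plain = re.sub(r'\x03(?:\d{1,2})?', '', colored)   # drop color open/close codes
print(repr(plain))                                 # 'lambda x: 123\n'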
873f21b6ef730d3367bb8ccf5624693d1c6b5a64
|
bin/jshelper.py
|
bin/jshelper.py
|
#!/usr/bin/env python
from __future__ import print_function, unicode_literals
import os
import sys
import uuid
from datetime import timedelta
from aspen.utils import utcnow
import gratipay
from gratipay import wireup
from gratipay.models.participant import Participant
if len(sys.argv) < 2:
sys.exit('Usage: %s <user>' % sys.argv[0])
db = Participant.db = wireup.db(wireup.env())
gratipay.RESTRICTED_USERNAMES = os.listdir('./www/')
username = sys.argv[1]
session_token = uuid.uuid4().hex
session_expires = utcnow() + timedelta(hours=6)
try:
participant = Participant.from_username(username)
participant.db = db
except Exception:
participant = Participant.with_random_username()
participant.db = db
participant.change_username(username)
participant.set_as_claimed()
participant.update_session(session_token, session_expires)
print(session_token)
|
Add helper for JS tests
|
Add helper for JS tests
|
Python
|
mit
|
gratipay/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com
|
Add helper for JS tests
|
#!/usr/bin/env python
from __future__ import print_function, unicode_literals
import os
import sys
import uuid
from datetime import timedelta
from aspen.utils import utcnow
import gratipay
from gratipay import wireup
from gratipay.models.participant import Participant
if len(sys.argv) < 2:
sys.exit('Usage: %s <user>' % sys.argv[0])
db = Participant.db = wireup.db(wireup.env())
gratipay.RESTRICTED_USERNAMES = os.listdir('./www/')
username = sys.argv[1]
session_token = uuid.uuid4().hex
session_expires = utcnow() + timedelta(hours=6)
try:
participant = Participant.from_username(username)
participant.db = db
except Exception:
participant = Participant.with_random_username()
participant.db = db
participant.change_username(username)
participant.set_as_claimed()
participant.update_session(session_token, session_expires)
print(session_token)
|
<commit_before><commit_msg>Add helper for JS tests<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function, unicode_literals
import os
import sys
import uuid
from datetime import timedelta
from aspen.utils import utcnow
import gratipay
from gratipay import wireup
from gratipay.models.participant import Participant
if len(sys.argv) < 2:
sys.exit('Usage: %s <user>' % sys.argv[0])
db = Participant.db = wireup.db(wireup.env())
gratipay.RESTRICTED_USERNAMES = os.listdir('./www/')
username = sys.argv[1]
session_token = uuid.uuid4().hex
session_expires = utcnow() + timedelta(hours=6)
try:
participant = Participant.from_username(username)
participant.db = db
except Exception:
participant = Participant.with_random_username()
participant.db = db
participant.change_username(username)
participant.set_as_claimed()
participant.update_session(session_token, session_expires)
print(session_token)
|
Add helper for JS tests#!/usr/bin/env python
from __future__ import print_function, unicode_literals
import os
import sys
import uuid
from datetime import timedelta
from aspen.utils import utcnow
import gratipay
from gratipay import wireup
from gratipay.models.participant import Participant
if len(sys.argv) < 2:
sys.exit('Usage: %s <user>' % sys.argv[0])
db = Participant.db = wireup.db(wireup.env())
gratipay.RESTRICTED_USERNAMES = os.listdir('./www/')
username = sys.argv[1]
session_token = uuid.uuid4().hex
session_expires = utcnow() + timedelta(hours=6)
try:
participant = Participant.from_username(username)
participant.db = db
except Exception:
participant = Participant.with_random_username()
participant.db = db
participant.change_username(username)
participant.set_as_claimed()
participant.update_session(session_token, session_expires)
print(session_token)
|
<commit_before><commit_msg>Add helper for JS tests<commit_after>#!/usr/bin/env python
from __future__ import print_function, unicode_literals
import os
import sys
import uuid
from datetime import timedelta
from aspen.utils import utcnow
import gratipay
from gratipay import wireup
from gratipay.models.participant import Participant
if len(sys.argv) < 2:
sys.exit('Usage: %s <user>' % sys.argv[0])
db = Participant.db = wireup.db(wireup.env())
gratipay.RESTRICTED_USERNAMES = os.listdir('./www/')
username = sys.argv[1]
session_token = uuid.uuid4().hex
session_expires = utcnow() + timedelta(hours=6)
try:
participant = Participant.from_username(username)
participant.db = db
except Exception:
participant = Participant.with_random_username()
participant.db = db
participant.change_username(username)
participant.set_as_claimed()
participant.update_session(session_token, session_expires)
print(session_token)
|
|
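A minimal sketch, not from the repo, of how a caller might check the six-hour expiry window the helper sets:
from datetime import datetime, timedelta, timezone

session_expires = datetime.now(timezone.utc) + timedelta(hours=6)
still_valid = datetime.now(timezone.utc) < session_expires
print(still_valid)   # True until the six-hour window lapses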
ff626b328043ec4df86366bc65cf73901529d9d8
|
bin/json2gpx.py
|
bin/json2gpx.py
|
#!/usr/bin/env python
"""Convert JSON from Google Takeout to GPX"""
import json
import os
import time
import argparse
def write_gpx(filep, locations):
"""Write locations as GPX to filep."""
filep.write(
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<gpx creator="json2gpx" version="0.1" xmlns="http://www.topografix.com/GPX/1/0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">\n'
'<trk><name><![CDATA[Europe]]></name><trkseg>\n')
for point in locations:
filep.write('<trkpt lat="{:f}" lon="{:f}">\n'.format(
float(point['latitudeE7'])/1e7,
float(point['longitudeE7'])/1e7))
if 'altitude' in point:
filep.write(' <ele>{:f}</ele>\n'.format(
float(point['altitude'])))
filep.write(' <time>{:s}</time>\n'.format(
time.strftime('%Y-%m-%dT%H:%M:%SZ',
time.gmtime(float(point['timestampMs'])/1000))))
filep.write('</trkpt>\n')
filep.write('</trkseg></trk></gpx>\n')
def main():
"""Convert JSON to GPX."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("json", type=str,
help="Input JSON file")
args = parser.parse_args()
with open(args.json, 'r') as file_:
locations = json.load(file_)['locations']
locations.reverse()
with open(os.path.splitext(args.json)[0] + '.gpx', 'w') as file_:
write_gpx(file_, locations)
if __name__ == "__main__":
main()
|
Add script to convert JSON from Google to GPX
|
Add script to convert JSON from Google to GPX
|
Python
|
mit
|
jaantoots/dotfiles,jaantoots/dotfiles,jaantoots/dotfiles
|
Add script to convert JSON from Google to GPX
|
#!/usr/bin/env python
"""Convert JSON from Google Takeout to GPX"""
import json
import os
import time
import argparse
def write_gpx(filep, locations):
"""Write locations as GPX to filep."""
filep.write(
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<gpx creator="json2gpx" version="0.1" xmlns="http://www.topografix.com/GPX/1/0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">\n'
'<trk><name><![CDATA[Europe]]></name><trkseg>\n')
for point in locations:
filep.write('<trkpt lat="{:f}" lon="{:f}">\n'.format(
float(point['latitudeE7'])/1e7,
float(point['longitudeE7'])/1e7))
if 'altitude' in point:
filep.write(' <ele>{:f}</ele>\n'.format(
float(point['altitude'])))
filep.write(' <time>{:s}</time>\n'.format(
time.strftime('%Y-%m-%dT%H:%M:%SZ',
time.gmtime(float(point['timestampMs'])/1000))))
filep.write('</trkpt>\n')
filep.write('</trkseg></trk></gpx>\n')
def main():
"""Convert JSON to GPX."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("json", type=str,
help="Input JSON file")
args = parser.parse_args()
with open(args.json, 'r') as file_:
locations = json.load(file_)['locations']
locations.reverse()
with open(os.path.splitext(args.json)[0] + '.gpx', 'w') as file_:
write_gpx(file_, locations)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to convert JSON from Google to GPX<commit_after>
|
#!/usr/bin/env python
"""Convert JSON from Google Takeout to GPX"""
import json
import os
import time
import argparse
def write_gpx(filep, locations):
"""Write locations as GPX to filep."""
filep.write(
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<gpx creator="json2gpx" version="0.1" xmlns="http://www.topografix.com/GPX/1/0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">\n'
'<trk><name><![CDATA[Europe]]></name><trkseg>\n')
for point in locations:
filep.write('<trkpt lat="{:f}" lon="{:f}">\n'.format(
float(point['latitudeE7'])/1e7,
float(point['longitudeE7'])/1e7))
if 'altitude' in point:
filep.write(' <ele>{:f}</ele>\n'.format(
float(point['altitude'])))
filep.write(' <time>{:s}</time>\n'.format(
time.strftime('%Y-%m-%dT%H:%M:%SZ',
time.gmtime(float(point['timestampMs'])/1000))))
filep.write('</trkpt>\n')
filep.write('</trkseg></trk></gpx>\n')
def main():
"""Convert JSON to GPX."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("json", type=str,
help="Input JSON file")
args = parser.parse_args()
with open(args.json, 'r') as file_:
locations = json.load(file_)['locations']
locations.reverse()
with open(os.path.splitext(args.json)[0] + '.gpx', 'w') as file_:
write_gpx(file_, locations)
if __name__ == "__main__":
main()
|
Add script to convert JSON from Google to GPX#!/usr/bin/env python
"""Convert JSON from Google Takeout to GPX"""
import json
import os
import time
import argparse
def write_gpx(filep, locations):
"""Write locations as GPX to filep."""
filep.write(
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<gpx creator="json2gpx" version="0.1" xmlns="http://www.topografix.com/GPX/1/0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">\n'
'<trk><name><![CDATA[Europe]]></name><trkseg>\n')
for point in locations:
filep.write('<trkpt lat="{:f}" lon="{:f}">\n'.format(
float(point['latitudeE7'])/1e7,
float(point['longitudeE7'])/1e7))
if 'altitude' in point:
filep.write(' <ele>{:f}</ele>\n'.format(
float(point['altitude'])))
filep.write(' <time>{:s}</time>\n'.format(
time.strftime('%Y-%m-%dT%H:%M:%SZ',
time.gmtime(float(point['timestampMs'])/1000))))
filep.write('</trkpt>\n')
filep.write('</trkseg></trk></gpx>\n')
def main():
"""Convert JSON to GPX."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("json", type=str,
help="Input JSON file")
args = parser.parse_args()
with open(args.json, 'r') as file_:
locations = json.load(file_)['locations']
locations.reverse()
with open(os.path.splitext(args.json)[0] + '.gpx', 'w') as file_:
write_gpx(file_, locations)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to convert JSON from Google to GPX<commit_after>#!/usr/bin/env python
"""Convert JSON from Google Takeout to GPX"""
import json
import os
import time
import argparse
def write_gpx(filep, locations):
"""Write locations as GPX to filep."""
filep.write(
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<gpx creator="json2gpx" version="0.1" xmlns="http://www.topografix.com/GPX/1/0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">\n'
'<trk><name><![CDATA[Europe]]></name><trkseg>\n')
for point in locations:
filep.write('<trkpt lat="{:f}" lon="{:f}">\n'.format(
float(point['latitudeE7'])/1e7,
float(point['longitudeE7'])/1e7))
if 'altitude' in point:
filep.write(' <ele>{:f}</ele>\n'.format(
float(point['altitude'])))
filep.write(' <time>{:s}</time>\n'.format(
time.strftime('%Y-%m-%dT%H:%M:%SZ',
time.gmtime(float(point['timestampMs'])/1000))))
filep.write('</trkpt>\n')
filep.write('</trkseg></trk></gpx>\n')
def main():
"""Convert JSON to GPX."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("json", type=str,
help="Input JSON file")
args = parser.parse_args()
with open(args.json, 'r') as file_:
locations = json.load(file_)['locations']
locations.reverse()
with open(os.path.splitext(args.json)[0] + '.gpx', 'w') as file_:
write_gpx(file_, locations)
if __name__ == "__main__":
main()
|
|
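Takeout stores coordinates as degrees scaled by 1e7 and timestamps in epoch milliseconds; a standalone sketch of the two conversions the script performs, with made-up sample values:
import time
point = {'latitudeE7': 513501000, 'longitudeE7': -1265000,
         'timestampMs': '1500000000000'}            # hypothetical sample point
lat = float(point['latitudeE7']) / 1e7              # 51.3501 degrees
lon = float(point['longitudeE7']) / 1e7             # -0.1265 degrees
iso = time.strftime('%Y-%m-%dT%H:%M:%SZ',
                    time.gmtime(float(point['timestampMs']) / 1000))
print(lat, lon, iso)                                # 51.3501 -0.1265 2017-07-14T02:40:00Z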
6e46df3721137df511801e12178174ecc8566230
|
python/strip_trailing_whitespace.py
|
python/strip_trailing_whitespace.py
|
#!/usr/bin/python
from convert_line_endings import convert_line_endings
import os
import re
import sys
def strip_trailing_whitespace(file):
with open(file, mode='rt') as infile:
lines = infile.readlines()
with open(file, mode='wt') as outfile:
for line in lines:
stripped = re.sub('[ \t]+$', '', line)
outfile.write(stripped)
if sys.platform.startswith("win"):
convert_line_endings(file)
def main():
if len(sys.argv) > 1:
strip_trailing_whitespace(sys.argv[1])
return
for dirpath, dirnames, filenames in os.walk('.'):
for file in filenames:
if os.path.splitext(file)[1] == '.cs':
csPath = os.path.join(dirpath, file)
strip_trailing_whitespace(csPath)
if __name__ == "__main__":
main()
|
Add Python utility function to strip trailing whitespace from .cs files. On Windows, this will also convert line endings, because of the way that Python will always write a text file with platform line endings
|
[trunk] Add Python utility function to strip trailing whitespace from .cs files. On Windows, this will also convert line endings, because of the way that Python will always write a text file with platform line endings
|
Python
|
bsd-3-clause
|
markfinal/BuildAMation,markfinal/BuildAMation,markfinal/BuildAMation,markfinal/BuildAMation,markfinal/BuildAMation
|
[trunk] Add Python utility function to strip trailing whitespace from .cs files. On Windows, this will also convert line endings, because of the way that Python will always write a text file with platform line endings
|
#!/usr/bin/python
from convert_line_endings import convert_line_endings
import os
import re
import sys
def strip_trailing_whitespace(file):
with open(file, mode='rt') as infile:
lines = infile.readlines()
with open(file, mode='wt') as outfile:
for line in lines:
stripped = re.sub('[ \t]+$', '', line)
outfile.write(stripped)
if sys.platform.startswith("win"):
convert_line_endings(file)
def main():
if len(sys.argv) > 1:
strip_trailing_whitespace(sys.argv[1])
return
for dirpath, dirnames, filenames in os.walk('.'):
for file in filenames:
if os.path.splitext(file)[1] == '.cs':
csPath = os.path.join(dirpath, file)
strip_trailing_whitespace(csPath)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>[trunk] Add Python utility function to strip trailing whitespace from .cs files. On Windows, this will also convert line endings, because of the way that Python will always write a text file with platform line endings<commit_after>
|
#!/usr/bin/python
from convert_line_endings import convert_line_endings
import os
import re
import sys
def strip_trailing_whitespace(file):
with open(file, mode='rt') as infile:
lines = infile.readlines()
with open(file, mode='wt') as outfile:
for line in lines:
stripped = re.sub('[ \t]+$', '', line)
outfile.write(stripped)
if sys.platform.startswith("win"):
convert_line_endings(file)
def main():
if len(sys.argv) > 1:
strip_trailing_whitespace(sys.argv[1])
return
for dirpath, dirnames, filenames in os.walk('.'):
for file in filenames:
if os.path.splitext(file)[1] == '.cs':
csPath = os.path.join(dirpath, file)
strip_trailing_whitespace(csPath)
if __name__ == "__main__":
main()
|
[trunk] Add Python utility function to strip trailing whitespace from .cs files. On Windows, this will also convert line endings, because of the way that Python will always write a text file with platform line endings#!/usr/bin/python
from convert_line_endings import convert_line_endings
import os
import re
import sys
def strip_trailing_whitespace(file):
with open(file, mode='rt') as infile:
lines = infile.readlines()
with open(file, mode='wt') as outfile:
for line in lines:
stripped = re.sub('[ \t]+$', '', line)
outfile.write(stripped)
if sys.platform.startswith("win"):
convert_line_endings(file)
def main():
if len(sys.argv) > 1:
strip_trailing_whitespace(sys.argv[1])
return
for dirpath, dirnames, filenames in os.walk('.'):
for file in filenames:
if os.path.splitext(file)[1] == '.cs':
csPath = os.path.join(dirpath, file)
strip_trailing_whitespace(csPath)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>[trunk] Add Python utility function to strip trailing whitespace from .cs files. On Windows, this will also convert line endings, because of the way that Python will always write a text file with platform line endings<commit_after>#!/usr/bin/python
from convert_line_endings import convert_line_endings
import os
import re
import sys
def strip_trailing_whitespace(file):
with open(file, mode='rt') as infile:
lines = infile.readlines()
with open(file, mode='wt') as outfile:
for line in lines:
stripped = re.sub('[ \t]+$', '', line)
outfile.write(stripped)
if sys.platform.startswith("win"):
convert_line_endings(file)
def main():
if len(sys.argv) > 1:
strip_trailing_whitespace(sys.argv[1])
return
for dirpath, dirnames, filenames in os.walk('.'):
for file in filenames:
if os.path.splitext(file)[1] == '.cs':
csPath = os.path.join(dirpath, file)
strip_trailing_whitespace(csPath)
if __name__ == "__main__":
main()
|
|
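The script's core is a single substitution; a quick sketch showing that it removes trailing spaces and tabs while keeping the newline ($ matches just before a trailing newline):
import re
line = 'int x = 1; \t \n'
print(repr(re.sub('[ \t]+$', '', line)))   # 'int x = 1;\n' (newline preserved)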
7b994e73816b1d2ff08a4eb26f06404d7851e5bb
|
profiling/replay_buffers.py
|
profiling/replay_buffers.py
|
from unittest.mock import Mock
import time
from os.path import expanduser
from tqdm import tqdm
from avalanche.benchmarks import fixed_size_experience_split, SplitMNIST
from avalanche.training import ReservoirSamplingBuffer
from avalanche.training import ParametricBuffer
benchmark = SplitMNIST(
n_experiences=5,
dataset_root=expanduser("~") + "/.avalanche/data/mnist/",
)
experience = benchmark.train_stream[0]
print("len experience: ", len(experience.dataset))
# start = time.time()
# buffer = ReservoirSamplingBuffer(100)
# for exp in tqdm(fixed_size_experience_split(experience, 1)):
# buffer.update_from_dataset(exp.dataset)
#
# end = time.time()
# duration = end - start
# print("ReservoirSampling Duration: ", duration)
start = time.time()
buffer = ParametricBuffer(100)
for exp in tqdm(fixed_size_experience_split(experience, 1)):
buffer.update(Mock(experience=exp, dataset=exp.dataset))
end = time.time()
duration = end - start
print("ParametricBuffer (random sampling) Duration: ", duration)
|
ADD profiling script for replay buffer (OCL benchmarking)
|
ADD profiling script for replay buffer (OCL benchmarking)
|
Python
|
mit
|
ContinualAI/avalanche,ContinualAI/avalanche
|
ADD profiling script for replay buffer (OCL benchmarking)
|
from unittest.mock import Mock
import time
from os.path import expanduser
from tqdm import tqdm
from avalanche.benchmarks import fixed_size_experience_split, SplitMNIST
from avalanche.training import ReservoirSamplingBuffer
from avalanche.training import ParametricBuffer
benchmark = SplitMNIST(
n_experiences=5,
dataset_root=expanduser("~") + "/.avalanche/data/mnist/",
)
experience = benchmark.train_stream[0]
print("len experience: ", len(experience.dataset))
# start = time.time()
# buffer = ReservoirSamplingBuffer(100)
# for exp in tqdm(fixed_size_experience_split(experience, 1)):
# buffer.update_from_dataset(exp.dataset)
#
# end = time.time()
# duration = end - start
# print("ReservoirSampling Duration: ", duration)
start = time.time()
buffer = ParametricBuffer(100)
for exp in tqdm(fixed_size_experience_split(experience, 1)):
buffer.update(Mock(experience=exp, dataset=exp.dataset))
end = time.time()
duration = end - start
print("ParametricBuffer (random sampling) Duration: ", duration)
|
<commit_before><commit_msg>ADD profiling script for replay buffer (OCL benchmarking)<commit_after>
|
from unittest.mock import Mock
import time
from os.path import expanduser
from tqdm import tqdm
from avalanche.benchmarks import fixed_size_experience_split, SplitMNIST
from avalanche.training import ReservoirSamplingBuffer
from avalanche.training import ParametricBuffer
benchmark = SplitMNIST(
n_experiences=5,
dataset_root=expanduser("~") + "/.avalanche/data/mnist/",
)
experience = benchmark.train_stream[0]
print("len experience: ", len(experience.dataset))
# start = time.time()
# buffer = ReservoirSamplingBuffer(100)
# for exp in tqdm(fixed_size_experience_split(experience, 1)):
# buffer.update_from_dataset(exp.dataset)
#
# end = time.time()
# duration = end - start
# print("ReservoirSampling Duration: ", duration)
start = time.time()
buffer = ParametricBuffer(100)
for exp in tqdm(fixed_size_experience_split(experience, 1)):
buffer.update(Mock(experience=exp, dataset=exp.dataset))
end = time.time()
duration = end - start
print("ParametricBuffer (random sampling) Duration: ", duration)
|
ADD profiling script for replay buffer (OCL benchmarking)from unittest.mock import Mock
import time
from os.path import expanduser
from tqdm import tqdm
from avalanche.benchmarks import fixed_size_experience_split, SplitMNIST
from avalanche.training import ReservoirSamplingBuffer
from avalanche.training import ParametricBuffer
benchmark = SplitMNIST(
n_experiences=5,
dataset_root=expanduser("~") + "/.avalanche/data/mnist/",
)
experience = benchmark.train_stream[0]
print("len experience: ", len(experience.dataset))
# start = time.time()
# buffer = ReservoirSamplingBuffer(100)
# for exp in tqdm(fixed_size_experience_split(experience, 1)):
# buffer.update_from_dataset(exp.dataset)
#
# end = time.time()
# duration = end - start
# print("ReservoirSampling Duration: ", duration)
start = time.time()
buffer = ParametricBuffer(100)
for exp in tqdm(fixed_size_experience_split(experience, 1)):
buffer.update(Mock(experience=exp, dataset=exp.dataset))
end = time.time()
duration = end - start
print("ParametricBuffer (random sampling) Duration: ", duration)
|
<commit_before><commit_msg>ADD profiling script for replay buffer (OCL benchmarking)<commit_after>from unittest.mock import Mock
import time
from os.path import expanduser
from tqdm import tqdm
from avalanche.benchmarks import fixed_size_experience_split, SplitMNIST
from avalanche.training import ReservoirSamplingBuffer
from avalanche.training import ParametricBuffer
benchmark = SplitMNIST(
n_experiences=5,
dataset_root=expanduser("~") + "/.avalanche/data/mnist/",
)
experience = benchmark.train_stream[0]
print("len experience: ", len(experience.dataset))
# start = time.time()
# buffer = ReservoirSamplingBuffer(100)
# for exp in tqdm(fixed_size_experience_split(experience, 1)):
# buffer.update_from_dataset(exp.dataset)
#
# end = time.time()
# duration = end - start
# print("ReservoirSampling Duration: ", duration)
start = time.time()
buffer = ParametricBuffer(100)
for exp in tqdm(fixed_size_experience_split(experience, 1)):
buffer.update(Mock(experience=exp, dataset=exp.dataset))
end = time.time()
duration = end - start
print("ParametricBuffer (random sampling) Duration: ", duration)
|
|
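For reference, a self-contained sketch of the reservoir-sampling idea behind ReservoirSamplingBuffer (classic Algorithm R, not Avalanche's implementation):
import random

def reservoir_sample(stream, k, seed=0):
    rng = random.Random(seed)
    buf = []
    for i, item in enumerate(stream):
        if len(buf) < k:
            buf.append(item)                 # fill the reservoir first
        else:
            j = rng.randint(0, i)            # uniform over [0, i], inclusive
            if j < k:
                buf[j] = item                # keep item with probability k/(i+1)
    return buf

print(reservoir_sample(range(10000), 5))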
62601372edf68882bd2a868d2ac3c1fb19dbc75b
|
comics/crawler/crawlers/gws.py
|
comics/crawler/crawlers/gws.py
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Girls With Slingshots'
language = 'en'
url = 'http://www.girlswithslingshot.com/'
start_date = '2004-09-30'
history_capable_days = 1
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
rights = 'Danielle Corsetto'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.web_url = 'http://www.daniellecorsetto.com/gws.html'
self.parse_web_page()
for img in self.web_page.imgs:
if 'src' in img and img['src'].startswith('images/gws/GWS'):
self.url = self.join_web_url(img['src'])
return
|
Add crawler for 'Girls With Slingshots'
|
Add crawler for 'Girls With Slingshots'
|
Python
|
agpl-3.0
|
datagutten/comics,klette/comics,klette/comics,klette/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics
|
Add crawler for 'Girls With Slingshots'
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Girls With Slingshots'
language = 'en'
url = 'http://www.girlswithslingshot.com/'
start_date = '2004-09-30'
history_capable_days = 1
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
rights = 'Danielle Corsetto'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.web_url = 'http://www.daniellecorsetto.com/gws.html'
self.parse_web_page()
for img in self.web_page.imgs:
if 'src' in img and img['src'].startswith('images/gws/GWS'):
self.url = self.join_web_url(img['src'])
return
|
<commit_before><commit_msg>Add crawler for 'Girls With Slingshots'<commit_after>
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Girls With Slingshots'
language = 'en'
url = 'http://www.girlswithslingshot.com/'
start_date = '2004-09-30'
history_capable_days = 1
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
rights = 'Danielle Corsetto'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.web_url = 'http://www.daniellecorsetto.com/gws.html'
self.parse_web_page()
for img in self.web_page.imgs:
if 'src' in img and img['src'].startswith('images/gws/GWS'):
self.url = self.join_web_url(img['src'])
return
|
Add crawler for 'Girls With Slingshots'from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Girls With Slingshots'
language = 'en'
url = 'http://www.girlswithslingshot.com/'
start_date = '2004-09-30'
history_capable_days = 1
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
rights = 'Danielle Corsetto'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.web_url = 'http://www.daniellecorsetto.com/gws.html'
self.parse_web_page()
for img in self.web_page.imgs:
if 'src' in img and img['src'].startswith('images/gws/GWS'):
self.url = self.join_web_url(img['src'])
return
|
<commit_before><commit_msg>Add crawler for 'Girls With Slingshots'<commit_after>from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Girls With Slingshots'
language = 'en'
url = 'http://www.girlswithslingshot.com/'
start_date = '2004-09-30'
history_capable_days = 1
schedule = 'Mo,Tu,We,Th,Fr'
time_zone = -5
rights = 'Danielle Corsetto'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.web_url = 'http://www.daniellecorsetto.com/gws.html'
self.parse_web_page()
for img in self.web_page.imgs:
if 'src' in img and img['src'].startswith('images/gws/GWS'):
self.url = self.join_web_url(img['src'])
return
|
|
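join_web_url presumably resolves the relative src against the page URL; a sketch using only the standard library, with a hypothetical strip file name:
try:
    from urllib.parse import urljoin         # Python 3
except ImportError:
    from urlparse import urljoin             # Python 2
print(urljoin('http://www.daniellecorsetto.com/gws.html',
              'images/gws/GWS0001.jpg'))     # file name is illustrative
# -> http://www.daniellecorsetto.com/images/gws/GWS0001.jpg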
4ca0db457ea9d38d0d151470b53f729978c95fa8
|
sli/config/scripts/sharding/binary-to-string.py
|
sli/config/scripts/sharding/binary-to-string.py
|
import sys
import re
import json
import base64
from binascii import a2b_base64,hexlify
# cd sli/acceptance-tests
# find . -name *.json | xargs -I file python ~/wgen/SLI/sli/config/scripts/sharding/binary-to-string.py file
fixture_lines = []
if len(sys.argv) == 2:
fixture_file = open(sys.argv[1])
fixture_lines = fixture_file.read().splitlines()
fixture_file.close()
else:
exit(0)
def toJUUID(base64str):
hexstr = hexlify(a2b_base64(base64str))
msb = hexstr[:16]
lsb = hexstr[16:32]
msb = msb[14:16] + msb[12:14] + msb[10:12] + msb[8:10] + msb[6:8] + msb[4:6] + msb[2:4] + msb[0:2]
lsb = lsb[14:16] + lsb[12:14] + lsb[10:12] + lsb[8:10] + lsb[6:8] + lsb[4:6] + lsb[2:4] + lsb[0:2]
hexstr = msb+lsb
return hexstr[0:8] + "-" + hexstr[8:12] + "-" + hexstr[12:16] + "-" + hexstr[16:20] + "-" + hexstr[20:32]
id_regex = re.compile(r'{\s*"\$binary"\s*:\s*"([a-zA-Z0-9=/+]*)"\s*,\s*"\$type"\s*:\s*"03"\s*}')
outfile = open(sys.argv[1], 'w')
for line in fixture_lines:
match = id_regex.search(line)
#print "%s | %s" % (match.group(1), base64.b64decode(match.group(1).encode()))
#print "%s | %s" % (match.group(1), hexlify(a2b_base64(match.group(1))))
#print "%s - %s" % (match.group(1), toJUUID(match.group(1)))
    if match is not None:
outfile.write(line.replace(match.group(0), '"' + toJUUID(match.group(1)) + '"') + '\n')
outfile.close()
|
Add script to convert binary UUIDs in fixture data to strings.
|
Add script to convert binary UUIDs in fixture data to strings.
|
Python
|
apache-2.0
|
inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service,inbloom/secure-data-service
|
Add script to convert binary UUIDs in fixture data to strings.
|
import sys
import re
import json
import base64
from binascii import a2b_base64,hexlify
# cd sli/acceptance-tests
# find . -name *.json | xargs -I file python ~/wgen/SLI/sli/config/scripts/sharding/binary-to-string.py file
fixture_lines = []
if len(sys.argv) == 2:
fixture_file = open(sys.argv[1])
fixture_lines = fixture_file.read().splitlines()
fixture_file.close()
else:
exit(0)
def toJUUID(base64str):
hexstr = hexlify(a2b_base64(base64str))
msb = hexstr[:16]
lsb = hexstr[16:32]
msb = msb[14:16] + msb[12:14] + msb[10:12] + msb[8:10] + msb[6:8] + msb[4:6] + msb[2:4] + msb[0:2]
lsb = lsb[14:16] + lsb[12:14] + lsb[10:12] + lsb[8:10] + lsb[6:8] + lsb[4:6] + lsb[2:4] + lsb[0:2]
hexstr = msb+lsb
return hexstr[0:8] + "-" + hexstr[8:12] + "-" + hexstr[12:16] + "-" + hexstr[16:20] + "-" + hexstr[20:32]
id_regex = re.compile(r'{\s*"\$binary"\s*:\s*"([a-zA-Z0-9=/+]*)"\s*,\s*"\$type"\s*:\s*"03"\s*}')
outfile = open(sys.argv[1], 'w')
for line in fixture_lines:
match = id_regex.search(line)
#print "%s | %s" % (match.group(1), base64.b64decode(match.group(1).encode()))
#print "%s | %s" % (match.group(1), hexlify(a2b_base64(match.group(1))))
#print "%s - %s" % (match.group(1), toJUUID(match.group(1)))
    if match is not None:
outfile.write(line.replace(match.group(0), '"' + toJUUID(match.group(1)) + '"') + '\n')
outfile.close()
|
<commit_before><commit_msg>Add script to convert binary UUIDs in fixture data to strings.<commit_after>
|
import sys
import re
import json
import base64
from binascii import a2b_base64,hexlify
# cd sli/acceptance-tests
# find . -name *.json | xargs -I file python ~/wgen/SLI/sli/config/scripts/sharding/binary-to-string.py file
fixture_lines = []
if len(sys.argv) == 2:
fixture_file = open(sys.argv[1])
fixture_lines = fixture_file.read().splitlines()
fixture_file.close()
else:
exit(0)
def toJUUID(base64str):
hexstr = hexlify(a2b_base64(base64str))
msb = hexstr[:16]
lsb = hexstr[16:32]
msb = msb[14:16] + msb[12:14] + msb[10:12] + msb[8:10] + msb[6:8] + msb[4:6] + msb[2:4] + msb[0:2]
lsb = lsb[14:16] + lsb[12:14] + lsb[10:12] + lsb[8:10] + lsb[6:8] + lsb[4:6] + lsb[2:4] + lsb[0:2]
hexstr = msb+lsb
return hexstr[0:8] + "-" + hexstr[8:12] + "-" + hexstr[12:16] + "-" + hexstr[16:20] + "-" + hexstr[20:32]
id_regex = re.compile(r'{\s*"\$binary"\s*:\s*"([a-zA-Z0-9=/+]*)"\s*,\s*"\$type"\s*:\s*"03"\s*}')
outfile = open(sys.argv[1], 'w')
for line in fixture_lines:
match = id_regex.search(line)
#print "%s | %s" % (match.group(1), base64.b64decode(match.group(1).encode()))
#print "%s | %s" % (match.group(1), hexlify(a2b_base64(match.group(1))))
#print "%s - %s" % (match.group(1), toJUUID(match.group(1)))
    if match is not None:
outfile.write(line.replace(match.group(0), '"' + toJUUID(match.group(1)) + '"') + '\n')
outfile.close()
|
Add script to convert binary UUIDs in fixture data to strings.import sys
import re
import json
import base64
from binascii import a2b_base64,hexlify
# cd sli/acceptance-tests
# find . -name *.json | xargs -I file python ~/wgen/SLI/sli/config/scripts/sharding/binary-to-string.py file
fixture_lines = []
if len(sys.argv) == 2:
fixture_file = open(sys.argv[1])
fixture_lines = fixture_file.read().splitlines()
fixture_file.close()
else:
exit(0)
def toJUUID(base64str):
hexstr = hexlify(a2b_base64(base64str))
msb = hexstr[:16]
lsb = hexstr[16:32]
msb = msb[14:16] + msb[12:14] + msb[10:12] + msb[8:10] + msb[6:8] + msb[4:6] + msb[2:4] + msb[0:2]
lsb = lsb[14:16] + lsb[12:14] + lsb[10:12] + lsb[8:10] + lsb[6:8] + lsb[4:6] + lsb[2:4] + lsb[0:2]
hexstr = msb+lsb
return hexstr[0:8] + "-" + hexstr[8:12] + "-" + hexstr[12:16] + "-" + hexstr[16:20] + "-" + hexstr[20:32]
id_regex = re.compile(r'{\s*"\$binary"\s*:\s*"([a-zA-Z0-9=/+]*)"\s*,\s*"\$type"\s*:\s*"03"\s*}')
outfile = open(sys.argv[1], 'w')
for line in fixture_lines:
match = id_regex.search(line)
#print "%s | %s" % (match.group(1), base64.b64decode(match.group(1).encode()))
#print "%s | %s" % (match.group(1), hexlify(a2b_base64(match.group(1))))
#print "%s - %s" % (match.group(1), toJUUID(match.group(1)))
    if match is not None:
outfile.write(line.replace(match.group(0), '"' + toJUUID(match.group(1)) + '"') + '\n')
outfile.close()
|
<commit_before><commit_msg>Add script to convert binary UUIDs in fixture data to strings.<commit_after>import sys
import re
import json
import base64
from binascii import a2b_base64,hexlify
# cd sli/acceptance-tests
# find . -name *.json | xargs -I file python ~/wgen/SLI/sli/config/scripts/sharding/binary-to-string.py file
fixture_lines = []
if len(sys.argv) == 2:
fixture_file = open(sys.argv[1])
fixture_lines = fixture_file.read().splitlines()
fixture_file.close()
else:
exit(0)
def toJUUID(base64str):
hexstr = hexlify(a2b_base64(base64str))
msb = hexstr[:16]
lsb = hexstr[16:32]
msb = msb[14:16] + msb[12:14] + msb[10:12] + msb[8:10] + msb[6:8] + msb[4:6] + msb[2:4] + msb[0:2]
lsb = lsb[14:16] + lsb[12:14] + lsb[10:12] + lsb[8:10] + lsb[6:8] + lsb[4:6] + lsb[2:4] + lsb[0:2]
hexstr = msb+lsb
return hexstr[0:8] + "-" + hexstr[8:12] + "-" + hexstr[12:16] + "-" + hexstr[16:20] + "-" + hexstr[20:32]
id_regex = re.compile(r'{\s*"\$binary"\s*:\s*"([a-zA-Z0-9=/+]*)"\s*,\s*"\$type"\s*:\s*"03"\s*}')
outfile = open(sys.argv[1], 'w')
for line in fixture_lines:
match = id_regex.search(line)
#print "%s | %s" % (match.group(1), base64.b64decode(match.group(1).encode()))
#print "%s | %s" % (match.group(1), hexlify(a2b_base64(match.group(1))))
#print "%s - %s" % (match.group(1), toJUUID(match.group(1)))
    if match is not None:
outfile.write(line.replace(match.group(0), '"' + toJUUID(match.group(1)) + '"') + '\n')
outfile.close()
|
|
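toJUUID byte-swaps each 8-byte half because the legacy Mongo Java driver encoding (binary subtype 3) stores the UUID's two 64-bit halves little-endian; a self-contained worked example on a known byte pattern:
from binascii import a2b_base64, hexlify
b64 = 'AAECAwQFBgcICQoLDA0ODw=='             # bytes 0x00..0x0f, chosen for clarity
hexstr = hexlify(a2b_base64(b64)).decode()   # '000102030405060708090a0b0c0d0e0f'
swap = lambda h: ''.join(h[i:i + 2] for i in range(14, -2, -2))  # reverse 8 bytes
juuid = swap(hexstr[:16]) + swap(hexstr[16:])
print('-'.join([juuid[0:8], juuid[8:12], juuid[12:16],
                juuid[16:20], juuid[20:32]]))
# 07060504-0302-0100-0f0e-0d0c0b0a0908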
adab4c914d759f84731bc736fc9afe9862f8222e
|
tests/backends/gstreamer.py
|
tests/backends/gstreamer.py
|
import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uri = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uris = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
Fix typo in GStreamer test
|
Fix typo in GStreamer test
|
Python
|
apache-2.0
|
woutervanwijk/mopidy,hkariti/mopidy,quartz55/mopidy,woutervanwijk/mopidy,swak/mopidy,vrs01/mopidy,vrs01/mopidy,tkem/mopidy,jmarsik/mopidy,kingosticks/mopidy,swak/mopidy,mokieyue/mopidy,kingosticks/mopidy,bacontext/mopidy,glogiotatidis/mopidy,priestd09/mopidy,mokieyue/mopidy,ali/mopidy,hkariti/mopidy,mokieyue/mopidy,tkem/mopidy,mopidy/mopidy,jcass77/mopidy,pacificIT/mopidy,rawdlite/mopidy,priestd09/mopidy,jodal/mopidy,bencevans/mopidy,abarisain/mopidy,dbrgn/mopidy,jodal/mopidy,priestd09/mopidy,pacificIT/mopidy,bacontext/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,adamcik/mopidy,pacificIT/mopidy,dbrgn/mopidy,jcass77/mopidy,vrs01/mopidy,SuperStarPL/mopidy,vrs01/mopidy,ZenithDK/mopidy,mopidy/mopidy,kingosticks/mopidy,tkem/mopidy,glogiotatidis/mopidy,bencevans/mopidy,quartz55/mopidy,diandiankan/mopidy,mokieyue/mopidy,adamcik/mopidy,rawdlite/mopidy,liamw9534/mopidy,liamw9534/mopidy,ZenithDK/mopidy,adamcik/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,quartz55/mopidy,hkariti/mopidy,jmarsik/mopidy,rawdlite/mopidy,swak/mopidy,ali/mopidy,quartz55/mopidy,abarisain/mopidy,mopidy/mopidy,bacontext/mopidy,glogiotatidis/mopidy,bacontext/mopidy,diandiankan/mopidy,rawdlite/mopidy,diandiankan/mopidy,dbrgn/mopidy,jodal/mopidy,dbrgn/mopidy,jmarsik/mopidy,bencevans/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,swak/mopidy,SuperStarPL/mopidy,ali/mopidy,ali/mopidy,bencevans/mopidy,jmarsik/mopidy,jcass77/mopidy,hkariti/mopidy,tkem/mopidy
|
import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uri = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
Fix typo in GStreamer test
|
import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uris = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uri = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix typo in GStreamer test<commit_after>
|
import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uris = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uri = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
Fix typo in GStreamer testimport unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uris = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uri = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix typo in GStreamer test<commit_after>import unittest
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
uris = ['file://data/song1.mp3',
'file://data/song2.mp3',
'file://data/song3.mp3',
]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
afd7ca3f2ac5bdbda088e8fefd334bfde102ee55
|
soccerway/concat_matches.py
|
soccerway/concat_matches.py
|
# -*- coding: utf-8 -*-
import glob, os, sys
import pandas as pd
d = '/home/tvl/dev/scrapy-soccerway/soccerway/data/h/'
frames = []
os.chdir(d)
print('Read csv files:')
for f in glob.glob("matches*.csv"):
print(f)
frames.append(pd.read_csv(d+f))
df = pd.concat(frames)
df.set_index('id', inplace=True)
df.sort_values(by='datetime', inplace=True)
print('Totals:')
print(df.count())
df.to_csv(d+'historical2010-2120.csv', sep=',', encoding='utf-8')
print('Dataframe size (bytes): {}'.format(sys.getsizeof(df)))
|
Concatenate historical matches csv files
|
Concatenate historical matches csv files
|
Python
|
apache-2.0
|
tvl/scrapy-soccerway
|
Concatenate historical matches csv files
|
# -*- coding: utf-8 -*-
import glob, os, sys
import pandas as pd
d = '/home/tvl/dev/scrapy-soccerway/soccerway/data/h/'
frames = []
os.chdir(d)
print('Read csv files:')
for f in glob.glob("matches*.csv"):
print(f)
frames.append(pd.read_csv(d+f))
df = pd.concat(frames)
df.set_index('id', inplace=True)
df.sort_values(by='datetime', inplace=True)
print('Totals:')
print(df.count())
df.to_csv(d+'historical2010-2120.csv', sep=',', encoding='utf-8')
print('Dataframe size (bytes): {}'.format(sys.getsizeof(df)))
|
<commit_before><commit_msg>Concatenate historical matches csv files<commit_after>
|
# -*- coding: utf-8 -*-
import glob, os, sys
import pandas as pd
d = '/home/tvl/dev/scrapy-soccerway/soccerway/data/h/'
frames = []
os.chdir(d)
print('Read csv files:')
for f in glob.glob("matches*.csv"):
print(f)
frames.append(pd.read_csv(d+f))
df = pd.concat(frames)
df.set_index('id', inplace=True)
df.sort_values(by='datetime', inplace=True)
print('Totals:')
print(df.count())
df.to_csv(d+'historical2010-2120.csv', sep=',', encoding='utf-8')
print('Dataframe size (bytes): {}'.format(sys.getsizeof(df)))
|
Concatenate historical matches csv files# -*- coding: utf-8 -*-
import glob, os, sys
import pandas as pd
d = '/home/tvl/dev/scrapy-soccerway/soccerway/data/h/'
frames = []
os.chdir(d)
print('Read csv files:')
for f in glob.glob("matches*.csv"):
print(f)
frames.append(pd.read_csv(d+f))
df = pd.concat(frames)
df.set_index('id', inplace=True)
df.sort_values(by='datetime', inplace=True)
print('Totals:')
print(df.count())
df.to_csv(d+'historical2010-2120.csv', sep=',', encoding='utf-8')
print('Dataframe size (bytes): {}'.format(sys.getsizeof(df)))
|
<commit_before><commit_msg>Concatenate historical matches csv files<commit_after># -*- coding: utf-8 -*-
import glob, os, sys
import pandas as pd
d = '/home/tvl/dev/scrapy-soccerway/soccerway/data/h/'
frames = []
os.chdir(d)
print('Read csv files:')
for f in glob.glob("matches*.csv"):
print(f)
frames.append(pd.read_csv(d+f))
df = pd.concat(frames)
df.set_index('id', inplace=True)
df.sort_values(by='datetime', inplace=True)
print('Totals:')
print(df.count())
df.to_csv(d+'historical2010-2120.csv', sep=',', encoding='utf-8')
print('Dataframe size (bytes): {}'.format(sys.getsizeof(df)))
|
|
afb4e246e904c4586ef3df13eca019350b481b35
|
utils/auth/backends/ldap.py
|
utils/auth/backends/ldap.py
|
from django_auth_ldap.backend import LDAPBackend
from utils.auth.backends import get_or_create_user
class MultiLDAPBackend(LDAPBackend):
def get_or_create_user(self, username, ldap_user):
backend = self.__module__ + "." + self.__class__.__name__
return get_or_create_user(backend, username)
|
Add an LDAP backend using the authentication data table
|
auth: Add an LDAP backend using the authentication data table
|
Python
|
bsd-3-clause
|
Inter-Actief/alexia
|
auth: Add an LDAP backend using the authentication data table
|
from django_auth_ldap.backend import LDAPBackend
from utils.auth.backends import get_or_create_user
class MultiLDAPBackend(LDAPBackend):
def get_or_create_user(self, username, ldap_user):
backend = self.__module__ + "." + self.__class__.__name__
return get_or_create_user(backend, username)
|
<commit_before><commit_msg>auth: Add an LDAP backend using the authentication data table<commit_after>
|
from django_auth_ldap.backend import LDAPBackend
from utils.auth.backends import get_or_create_user
class MultiLDAPBackend(LDAPBackend):
def get_or_create_user(self, username, ldap_user):
backend = self.__module__ + "." + self.__class__.__name__
return get_or_create_user(backend, username)
|
auth: Add an LDAP backend using the authentication data tablefrom django_auth_ldap.backend import LDAPBackend
from utils.auth.backends import get_or_create_user
class MultiLDAPBackend(LDAPBackend):
def get_or_create_user(self, username, ldap_user):
backend = self.__module__ + "." + self.__class__.__name__
return get_or_create_user(backend, username)
|
<commit_before><commit_msg>auth: Add an LDAP backend using the authentication data table<commit_after>from django_auth_ldap.backend import LDAPBackend
from utils.auth.backends import get_or_create_user
class MultiLDAPBackend(LDAPBackend):
def get_or_create_user(self, username, ldap_user):
backend = self.__module__ + "." + self.__class__.__name__
return get_or_create_user(backend, username)
|
|
d808b1188da8f3bcfb01738677a18d6b0c386a37
|
percept-proto/workflows/commands/list_tasks.py
|
percept-proto/workflows/commands/list_tasks.py
|
"""
List all available tasks
"""
from management.commands import BaseCommand
from utils.registry import registry, find_in_registry
from utils.models import get_task_name
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
def command(self, *args, **options):
print "Available tasks:"
print " Name - Help"
for entry in registry:
cls = entry.cls
name = get_task_name(cls)
help = getattr(cls, "help", "")
print "{0} - {1}".format(name, help)
|
Add command to list available tasks and help
|
Add command to list available tasks and help
|
Python
|
apache-2.0
|
VikParuchuri/percept
|
Add command to list available tasks and help
|
"""
List all available tasks
"""
from management.commands import BaseCommand
from utils.registry import registry, find_in_registry
from utils.models import get_task_name
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
def command(self, *args, **options):
print "Available tasks:"
print " Name - Help"
for entry in registry:
cls = entry.cls
name = get_task_name(cls)
help = getattr(cls, "help", "")
print "{0} - {1}".format(name, help)
|
<commit_before><commit_msg>Add command to list available tasks and help<commit_after>
|
"""
List all available tasks
"""
from management.commands import BaseCommand
from utils.registry import registry, find_in_registry
from utils.models import get_task_name
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
def command(self, *args, **options):
print "Available tasks:"
print " Name - Help"
for entry in registry:
cls = entry.cls
name = get_task_name(cls)
help = getattr(cls, "help", "")
print "{0} - {1}".format(name, help)
|
Add command to list available tasks and help"""
List all available tasks
"""
from management.commands import BaseCommand
from utils.registry import registry, find_in_registry
from utils.models import get_task_name
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
def command(self, *args, **options):
print "Available tasks:"
print " Name - Help"
for entry in registry:
cls = entry.cls
name = get_task_name(cls)
help = getattr(cls, "help", "")
print "{0} - {1}".format(name, help)
|
<commit_before><commit_msg>Add command to list available tasks and help<commit_after>"""
List all available tasks
"""
from management.commands import BaseCommand
from utils.registry import registry, find_in_registry
from utils.models import get_task_name
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = ''
def command(self, *args, **options):
print "Available tasks:"
print " Name - Help"
for entry in registry:
cls = entry.cls
name = get_task_name(cls)
help = getattr(cls, "help", "")
print "{0} - {1}".format(name, help)
|
|
672027dce8a452f15847963bfa36df035bca2d14
|
vrtracking.py
|
vrtracking.py
|
import sys
import time
import openvr
openvr.init(openvr.VRApplication_Scene)
poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
poses = poses_t()
while True:
openvr.VRCompositor().waitGetPoses(poses, len(poses), None, 0)
hmd_pose = poses[openvr.k_unTrackedDeviceIndex_Hmd]
#print(hmd_pose.mDeviceToAbsoluteTracking)
print(poses[3].mDeviceToAbsoluteTracking)
sys.stdout.flush()
time.sleep(0.2)
openvr.shutdown()
'''
poses[0] = Headset
poses[1] = basestation??
poses[2] = ???
poses[3] = controller 1
poses[4] = controller 2
[[?, ?, ?, X(?)],
[?, ?, ?, Y(?)],
[Yaw, ?, ?, Z(?)]]
'''
|
Add file for tracking Vive data
|
Add file for tracking Vive data
The data includes position, pitch, yaw and roll for the HMD and controllers
|
Python
|
mit
|
EiT-VR-2017-Gruppe-2/EV3-code
|
Add file for tracking Vive data
The data includes position, pitch, yaw and roll for the HMD and controllers
|
import sys
import time
import openvr
openvr.init(openvr.VRApplication_Scene)
poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
poses = poses_t()
while True:
openvr.VRCompositor().waitGetPoses(poses, len(poses), None, 0)
hmd_pose = poses[openvr.k_unTrackedDeviceIndex_Hmd]
#print(hmd_pose.mDeviceToAbsoluteTracking)
print(poses[3].mDeviceToAbsoluteTracking)
sys.stdout.flush()
time.sleep(0.2)
openvr.shutdown()
'''
poses[0] = Headset
poses[1] = basestation??
poses[2] = ???
poses[3] = controller 1
poses[4] = controller 2
[[?, ?, ?, X(?)],
[?, ?, ?, Y(?)],
[Yaw, ?, ?, Z(?)]]
'''
|
<commit_before><commit_msg>Add file for tracking Vive data
The data includes position, pitch, yaw and roll for the HMD and controllers<commit_after>
|
import sys
import time
import openvr
openvr.init(openvr.VRApplication_Scene)
poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
poses = poses_t()
while True:
openvr.VRCompositor().waitGetPoses(poses, len(poses), None, 0)
hmd_pose = poses[openvr.k_unTrackedDeviceIndex_Hmd]
#print(hmd_pose.mDeviceToAbsoluteTracking)
print(poses[3].mDeviceToAbsoluteTracking)
sys.stdout.flush()
time.sleep(0.2)
openvr.shutdown()
'''
poses[0] = Headset
poses[1] = basestation??
poses[2] = ???
poses[3] = controller 1
poses[4] = controller 2
[[?, ?, ?, X(?)],
[?, ?, ?, Y(?)],
[Yaw, ?, ?, Z(?)]]
'''
|
Add file for tracking Vive data
The data includes position, pitch, yaw and roll for the HMD and controllersimport sys
import time
import openvr
openvr.init(openvr.VRApplication_Scene)
poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
poses = poses_t()
while True:
openvr.VRCompositor().waitGetPoses(poses, len(poses), None, 0)
hmd_pose = poses[openvr.k_unTrackedDeviceIndex_Hmd]
#print(hmd_pose.mDeviceToAbsoluteTracking)
print(poses[3].mDeviceToAbsoluteTracking)
sys.stdout.flush()
time.sleep(0.2)
openvr.shutdown()
'''
poses[0] = Headset
poses[1] = basestation??
poses[2] = ???
poses[3] = controller 1
poses[4] = controller 2
[[?, ?, ?, X(?)],
[?, ?, ?, Y(?)],
[Yaw, ?, ?, Z(?)]]
'''
|
<commit_before><commit_msg>Add file for tracking Vive data
The data includes position, pitch, yaw and roll for the HMD and controllers<commit_after>import sys
import time
import openvr
openvr.init(openvr.VRApplication_Scene)
poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
poses = poses_t()
while True:
openvr.VRCompositor().waitGetPoses(poses, len(poses), None, 0)
hmd_pose = poses[openvr.k_unTrackedDeviceIndex_Hmd]
#print(hmd_pose.mDeviceToAbsoluteTracking)
print(poses[3].mDeviceToAbsoluteTracking)
sys.stdout.flush()
time.sleep(0.2)
openvr.shutdown()
'''
poses[0] = Headset
poses[1] = basestation??
poses[2] = ???
poses[3] = controller 1
poses[4] = controller 2
[[?, ?, ?, X(?)],
[?, ?, ?, Y(?)],
[Yaw, ?, ?, Z(?)]]
'''
|
|
f1a27644e32291d24171f97e3b8dae6b7490966d
|
94/Solution.py
|
94/Solution.py
|
class Solution:
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root is None:
return []
else:
return list(self.inorderTraversalGen(root))
def inorderTraversalGen(self, node):
if node.left is not None:
for other in self.inorderTraversal(node.left):
yield other
yield node.value
if node.right is not None:
for other in self.inorderTraversal(node.right):
yield other
|
Add binary tree inorder traversal
|
Add binary tree inorder traversal
|
Python
|
mit
|
xliiauo/leetcode,xiao0720/leetcode
|
Add binary tree inorder traversal
|
class Solution:
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root is None:
return []
else:
return list(self.inorderTraversalGen(root))
def inorderTraversalGen(self, node):
if node.left is not None:
for other in self.inorderTraversal(node.left):
yield other
yield node.value
if node.right is not None:
for other in self.inorderTraversal(node.right):
yield other
|
<commit_before><commit_msg>Add binary tree inorder traversal<commit_after>
|
class Solution:
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root is None:
return []
else:
return list(self.inorderTraversalGen(root))
def inorderTraversalGen(self, node):
if node.left is not None:
for other in self.inorderTraversal(node.left):
yield other
yield node.value
if node.right is not None:
for other in self.inorderTraversal(node.right):
yield other
|
Add binary tree inorder traversalclass Solution:
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root is None:
return []
else:
return list(self.inorderTraversalGen(root))
def inorderTraversalGen(self, node):
if node.left is not None:
for other in self.inorderTraversal(node.left):
yield other
yield node.value
if node.right is not None:
for other in self.inorderTraversal(node.right):
yield other
|
<commit_before><commit_msg>Add binary tree inorder traversal<commit_after>class Solution:
def inorderTraversal(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if root is None:
return []
else:
return list(self.inorderTraversalGen(root))
def inorderTraversalGen(self, node):
if node.left is not None:
for other in self.inorderTraversal(node.left):
yield other
yield node.value
if node.right is not None:
for other in self.inorderTraversal(node.right):
yield other
|
|
58ab8c317c71aa2c5fc8d47d2bb9e4778290482e
|
bayespy/inference/vmp/nodes/tests/test_beta.py
|
bayespy/inference/vmp/nodes/tests/test_beta.py
|
######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `beta` module.
"""
import numpy as np
from scipy import special
from bayespy.nodes import Beta
from bayespy.utils import utils
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestBinomial(TestCase):
"""
Unit tests for Binomial node
"""
def test_init(self):
"""
Test the creation of binomial nodes.
"""
# Some simple initializations
p = Beta([1.5, 4.2])
# Check that plates are correct
p = Beta([2, 3], plates=(4,3))
self.assertEqual(p.plates,
(4,3))
p = Beta(np.ones((4,3,2)))
self.assertEqual(p.plates,
(4,3))
# Parent not a vector
self.assertRaises(ValueError,
Beta,
4)
# Parent vector has wrong shape
self.assertRaises(ValueError,
Beta,
[4])
self.assertRaises(ValueError,
Beta,
[4,4,4])
# Parent vector has invalid values
self.assertRaises(ValueError,
Beta,
[-2,3])
# Plates inconsistent
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(3,))
# Explicit plates too small
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(1,))
pass
def test_moments(self):
"""
Test the moments of binomial nodes.
"""
p = Beta([2, 3])
u = p._message_to_child()
self.assertAllClose(u[0],
special.psi([2,3]) - special.psi(2+3))
pass
|
Add unit tests for Beta node
|
TST: Add unit tests for Beta node
|
Python
|
mit
|
fivejjs/bayespy,SalemAmeen/bayespy,bayespy/bayespy,jluttine/bayespy
|
TST: Add unit tests for Beta node
|
######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `beta` module.
"""
import numpy as np
from scipy import special
from bayespy.nodes import Beta
from bayespy.utils import utils
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestBinomial(TestCase):
"""
Unit tests for Binomial node
"""
def test_init(self):
"""
Test the creation of binomial nodes.
"""
# Some simple initializations
p = Beta([1.5, 4.2])
# Check that plates are correct
p = Beta([2, 3], plates=(4,3))
self.assertEqual(p.plates,
(4,3))
p = Beta(np.ones((4,3,2)))
self.assertEqual(p.plates,
(4,3))
# Parent not a vector
self.assertRaises(ValueError,
Beta,
4)
# Parent vector has wrong shape
self.assertRaises(ValueError,
Beta,
[4])
self.assertRaises(ValueError,
Beta,
[4,4,4])
# Parent vector has invalid values
self.assertRaises(ValueError,
Beta,
[-2,3])
# Plates inconsistent
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(3,))
# Explicit plates too small
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(1,))
pass
def test_moments(self):
"""
Test the moments of binomial nodes.
"""
p = Beta([2, 3])
u = p._message_to_child()
self.assertAllClose(u[0],
special.psi([2,3]) - special.psi(2+3))
pass
|
<commit_before><commit_msg>TST: Add unit tests for Beta node<commit_after>
|
######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `beta` module.
"""
import numpy as np
from scipy import special
from bayespy.nodes import Beta
from bayespy.utils import utils
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestBinomial(TestCase):
"""
Unit tests for Binomial node
"""
def test_init(self):
"""
Test the creation of binomial nodes.
"""
# Some simple initializations
p = Beta([1.5, 4.2])
# Check that plates are correct
p = Beta([2, 3], plates=(4,3))
self.assertEqual(p.plates,
(4,3))
p = Beta(np.ones((4,3,2)))
self.assertEqual(p.plates,
(4,3))
# Parent not a vector
self.assertRaises(ValueError,
Beta,
4)
# Parent vector has wrong shape
self.assertRaises(ValueError,
Beta,
[4])
self.assertRaises(ValueError,
Beta,
[4,4,4])
# Parent vector has invalid values
self.assertRaises(ValueError,
Beta,
[-2,3])
# Plates inconsistent
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(3,))
# Explicit plates too small
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(1,))
pass
def test_moments(self):
"""
Test the moments of binomial nodes.
"""
p = Beta([2, 3])
u = p._message_to_child()
self.assertAllClose(u[0],
special.psi([2,3]) - special.psi(2+3))
pass
|
TST: Add unit tests for Beta node######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `beta` module.
"""
import numpy as np
from scipy import special
from bayespy.nodes import Beta
from bayespy.utils import utils
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestBinomial(TestCase):
"""
Unit tests for Binomial node
"""
def test_init(self):
"""
Test the creation of binomial nodes.
"""
# Some simple initializations
p = Beta([1.5, 4.2])
# Check that plates are correct
p = Beta([2, 3], plates=(4,3))
self.assertEqual(p.plates,
(4,3))
p = Beta(np.ones((4,3,2)))
self.assertEqual(p.plates,
(4,3))
# Parent not a vector
self.assertRaises(ValueError,
Beta,
4)
# Parent vector has wrong shape
self.assertRaises(ValueError,
Beta,
[4])
self.assertRaises(ValueError,
Beta,
[4,4,4])
# Parent vector has invalid values
self.assertRaises(ValueError,
Beta,
[-2,3])
# Plates inconsistent
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(3,))
# Explicit plates too small
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(1,))
pass
def test_moments(self):
"""
Test the moments of binomial nodes.
"""
p = Beta([2, 3])
u = p._message_to_child()
self.assertAllClose(u[0],
special.psi([2,3]) - special.psi(2+3))
pass
|
<commit_before><commit_msg>TST: Add unit tests for Beta node<commit_after>######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `beta` module.
"""
import numpy as np
from scipy import special
from bayespy.nodes import Beta
from bayespy.utils import utils
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestBinomial(TestCase):
"""
Unit tests for Binomial node
"""
def test_init(self):
"""
Test the creation of binomial nodes.
"""
# Some simple initializations
p = Beta([1.5, 4.2])
# Check that plates are correct
p = Beta([2, 3], plates=(4,3))
self.assertEqual(p.plates,
(4,3))
p = Beta(np.ones((4,3,2)))
self.assertEqual(p.plates,
(4,3))
# Parent not a vector
self.assertRaises(ValueError,
Beta,
4)
# Parent vector has wrong shape
self.assertRaises(ValueError,
Beta,
[4])
self.assertRaises(ValueError,
Beta,
[4,4,4])
# Parent vector has invalid values
self.assertRaises(ValueError,
Beta,
[-2,3])
# Plates inconsistent
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(3,))
# Explicit plates too small
self.assertRaises(ValueError,
Beta,
np.ones((4,2)),
plates=(1,))
pass
def test_moments(self):
"""
Test the moments of binomial nodes.
"""
p = Beta([2, 3])
u = p._message_to_child()
self.assertAllClose(u[0],
special.psi([2,3]) - special.psi(2+3))
pass
|
|
82345a988dfcfe468f1624ac12252cf1136cf8b1
|
core/templatetags/canedit.py
|
core/templatetags/canedit.py
|
import datetime
from django import template
register = template.Library()
@register.filter()
def can_edit(vote_or_vs, user):
return vote_or_vs.canEdit(user)
|
Add template tag to check edit permission on votes and voting systems
|
Add template tag to check edit permission on votes and voting systems
|
Python
|
mit
|
kuboschek/jay,OpenJUB/jay
|
Add template tag to check edit permission on votes and voting systems
|
import datetime
from django import template
register = template.Library()
@register.filter()
def can_edit(vote_or_vs, user):
return vote_or_vs.canEdit(user)
|
<commit_before><commit_msg>Add template tag to check edit permission on votes and voting systems<commit_after>
|
import datetime
from django import template
register = template.Library()
@register.filter()
def can_edit(vote_or_vs, user):
return vote_or_vs.canEdit(user)
|
Add template tag to check edit permission on votes and voting systemsimport datetime
from django import template
register = template.Library()
@register.filter()
def can_edit(vote_or_vs, user):
return vote_or_vs.canEdit(user)
|
<commit_before><commit_msg>Add template tag to check edit permission on votes and voting systems<commit_after>import datetime
from django import template
register = template.Library()
@register.filter()
def can_edit(vote_or_vs, user):
return vote_or_vs.canEdit(user)
|
|
daa110457cf11d8b6f6d0d30dd656388bf942509
|
clear-all-rate-limits/clear_all_rate_limits.py
|
clear-all-rate-limits/clear_all_rate_limits.py
|
"""
Clears all user-defined rate limits for all projects in an account
Use with an account access token that has scopes 'read' and 'write'
Usage:
python clear_all_rate_limits.py YOUR_ACCOUNT_ACCESS_TOKEN
"""
from __future__ import print_function
import sys
from pprint import pprint
import requests
def main():
account_access_token = sys.argv[1]
# list the projects in this account
resp = requests.get('https://api.rollbar.com/api/1/projects', headers={'X-Rollbar-Access-Token': account_access_token})
projects = resp.json()['result']
for project in projects:
handle_project(account_access_token, project)
def handle_project(account_access_token, project):
project_id = project['id']
print("Handling project:", project_id, project['name'])
# list all project access tokens
resp = requests.get('https://api.rollbar.com/api/1/project/%d/access_tokens' % project_id, headers={'X-Rollbar-Access-Token': account_access_token})
for project_token in resp.json()['result']:
handle_project_token(account_access_token, project_id, project_token)
print()
def handle_project_token(account_access_token, project_id, project_token):
project_access_token = project_token['access_token']
print("Handling project", project_id, "token", project_access_token)
print("-- Current rate limit:", project_token['rate_limit_window_count'], "per", project_token['rate_limit_window_size'])
if project_token['rate_limit_window_size'] is None and project_token['rate_limit_window_count'] is None:
print("-- No limit; skipping")
else:
print("-- Clearing limit...")
resp = requests.patch('https://api.rollbar.com/api/1/project/{}/access_token/{}'.format(project_id, project_access_token),
{'rate_limit_window_count': 0, 'rate_limit_window_size': 0},
headers={'X-Rollbar-Access-Token': account_access_token})
resp_json = resp.json()
if resp_json['err']:
print("-- Error:", resp_json['message'])
else:
print("-- Done.")
print()
if __name__ == '__main__':
main()
|
Add script to clear all user-defined rate limits within an account
|
Add script to clear all user-defined rate limits within an account
|
Python
|
mit
|
rollbar/api-examples
|
Add script to clear all user-defined rate limits within an account
|
"""
Clears all user-defined rate limits for all projects in an account
Use with an account access token that has scopes 'read' and 'write'
Usage:
python clear_all_rate_limits.py YOUR_ACCOUNT_ACCESS_TOKEN
"""
from __future__ import print_function
import sys
from pprint import pprint
import requests
def main():
account_access_token = sys.argv[1]
# list the projects in this account
resp = requests.get('https://api.rollbar.com/api/1/projects', headers={'X-Rollbar-Access-Token': account_access_token})
projects = resp.json()['result']
for project in projects:
handle_project(account_access_token, project)
def handle_project(account_access_token, project):
project_id = project['id']
print("Handling project:", project_id, project['name'])
# list all project access tokens
resp = requests.get('https://api.rollbar.com/api/1/project/%d/access_tokens' % project_id, headers={'X-Rollbar-Access-Token': account_access_token})
for project_token in resp.json()['result']:
handle_project_token(account_access_token, project_id, project_token)
print()
def handle_project_token(account_access_token, project_id, project_token):
project_access_token = project_token['access_token']
print("Handling project", project_id, "token", project_access_token)
print("-- Current rate limit:", project_token['rate_limit_window_count'], "per", project_token['rate_limit_window_size'])
if project_token['rate_limit_window_size'] is None and project_token['rate_limit_window_count'] is None:
print("-- No limit; skipping")
else:
print("-- Clearing limit...")
resp = requests.patch('https://api.rollbar.com/api/1/project/{}/access_token/{}'.format(project_id, project_access_token),
{'rate_limit_window_count': 0, 'rate_limit_window_size': 0},
headers={'X-Rollbar-Access-Token': account_access_token})
resp_json = resp.json()
if resp_json['err']:
print("-- Error:", resp_json['message'])
else:
print("-- Done.")
print()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to clear all user-defined rate limits within an account<commit_after>
|
"""
Clears all user-defined rate limits for all projects in an account
Use with an account access token that has scopes 'read' and 'write'
Usage:
python clear_all_rate_limits.py YOUR_ACCOUNT_ACCESS_TOKEN
"""
from __future__ import print_function
import sys
from pprint import pprint
import requests
def main():
account_access_token = sys.argv[1]
# list the projects in this account
resp = requests.get('https://api.rollbar.com/api/1/projects', headers={'X-Rollbar-Access-Token': account_access_token})
projects = resp.json()['result']
for project in projects:
handle_project(account_access_token, project)
def handle_project(account_access_token, project):
project_id = project['id']
print("Handling project:", project_id, project['name'])
# list all project access tokens
resp = requests.get('https://api.rollbar.com/api/1/project/%d/access_tokens' % project_id, headers={'X-Rollbar-Access-Token': account_access_token})
for project_token in resp.json()['result']:
handle_project_token(account_access_token, project_id, project_token)
print()
def handle_project_token(account_access_token, project_id, project_token):
project_access_token = project_token['access_token']
print("Handling project", project_id, "token", project_access_token)
print("-- Current rate limit:", project_token['rate_limit_window_count'], "per", project_token['rate_limit_window_size'])
if project_token['rate_limit_window_size'] is None and project_token['rate_limit_window_count'] is None:
print("-- No limit; skipping")
else:
print("-- Clearing limit...")
resp = requests.patch('https://api.rollbar.com/api/1/project/{}/access_token/{}'.format(project_id, project_access_token),
{'rate_limit_window_count': 0, 'rate_limit_window_size': 0},
headers={'X-Rollbar-Access-Token': account_access_token})
resp_json = resp.json()
if resp_json['err']:
print("-- Error:", resp_json['message'])
else:
print("-- Done.")
print()
if __name__ == '__main__':
main()
|
Add script to clear all user-defined rate limits within an account"""
Clears all user-defined rate limits for all projects in an account
Use with an account access token that has scopes 'read' and 'write'
Usage:
python clear_all_rate_limits.py YOUR_ACCOUNT_ACCESS_TOKEN
"""
from __future__ import print_function
import sys
from pprint import pprint
import requests
def main():
account_access_token = sys.argv[1]
# list the projects in this account
resp = requests.get('https://api.rollbar.com/api/1/projects', headers={'X-Rollbar-Access-Token': account_access_token})
projects = resp.json()['result']
for project in projects:
handle_project(account_access_token, project)
def handle_project(account_access_token, project):
project_id = project['id']
print("Handling project:", project_id, project['name'])
# list all project access tokens
resp = requests.get('https://api.rollbar.com/api/1/project/%d/access_tokens' % project_id, headers={'X-Rollbar-Access-Token': account_access_token})
for project_token in resp.json()['result']:
handle_project_token(account_access_token, project_id, project_token)
print()
def handle_project_token(account_access_token, project_id, project_token):
project_access_token = project_token['access_token']
print("Handling project", project_id, "token", project_access_token)
print("-- Current rate limit:", project_token['rate_limit_window_count'], "per", project_token['rate_limit_window_size'])
if project_token['rate_limit_window_size'] is None and project_token['rate_limit_window_count'] is None:
print("-- No limit; skipping")
else:
print("-- Clearing limit...")
resp = requests.patch('https://api.rollbar.com/api/1/project/{}/access_token/{}'.format(project_id, project_access_token),
{'rate_limit_window_count': 0, 'rate_limit_window_size': 0},
headers={'X-Rollbar-Access-Token': account_access_token})
resp_json = resp.json()
if resp_json['err']:
print("-- Error:", resp_json['message'])
else:
print("-- Done.")
print()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to clear all user-defined rate limits within an account<commit_after>"""
Clears all user-defined rate limits for all projects in an account
Use with an account access token that has scopes 'read' and 'write'
Usage:
python clear_all_rate_limits.py YOUR_ACCOUNT_ACCESS_TOKEN
"""
from __future__ import print_function
import sys
from pprint import pprint
import requests
def main():
account_access_token = sys.argv[1]
# list the projects in this account
resp = requests.get('https://api.rollbar.com/api/1/projects', headers={'X-Rollbar-Access-Token': account_access_token})
projects = resp.json()['result']
for project in projects:
handle_project(account_access_token, project)
def handle_project(account_access_token, project):
project_id = project['id']
print("Handling project:", project_id, project['name'])
# list all project access tokens
resp = requests.get('https://api.rollbar.com/api/1/project/%d/access_tokens' % project_id, headers={'X-Rollbar-Access-Token': account_access_token})
for project_token in resp.json()['result']:
handle_project_token(account_access_token, project_id, project_token)
print()
def handle_project_token(account_access_token, project_id, project_token):
project_access_token = project_token['access_token']
print("Handling project", project_id, "token", project_access_token)
print("-- Current rate limit:", project_token['rate_limit_window_count'], "per", project_token['rate_limit_window_size'])
if project_token['rate_limit_window_size'] is None and project_token['rate_limit_window_count'] is None:
print("-- No limit; skipping")
else:
print("-- Clearing limit...")
resp = requests.patch('https://api.rollbar.com/api/1/project/{}/access_token/{}'.format(project_id, project_access_token),
{'rate_limit_window_count': 0, 'rate_limit_window_size': 0},
headers={'X-Rollbar-Access-Token': account_access_token})
resp_json = resp.json()
if resp_json['err']:
print("-- Error:", resp_json['message'])
else:
print("-- Done.")
print()
if __name__ == '__main__':
main()
|
|
4f7af6fc11e529ffd2dd81690797a913163bea5f
|
warehouse/database/types.py
|
warehouse/database/types.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from sqlalchemy.types import SchemaType, TypeDecorator
from sqlalchemy.types import Enum as SQLAEnum
class EnumSymbol(object):
"""
Define a fixed symbol tied to a parent class.
"""
def __init__(self, cls_, name, value, description):
self.cls_ = cls_
self.name = name
self.value = value
self.description = description
def __reduce__(self):
"""
Allow unpickling to return the symbol linked to the Enum class.
"""
return getattr, (self.cls_, self.name)
def __iter__(self):
return iter([self.value, self.description])
def __repr__(self):
return "<%s>" % self.name
class EnumMeta(type):
"""
Generate new Enum classes.
"""
def __init__(cls, classname, bases, dict_):
cls._reg = reg = cls._reg.copy()
for k, v in dict_.items():
if isinstance(v, tuple):
sym = reg[v[0]] = EnumSymbol(cls, k, *v)
setattr(cls, k, sym)
return type.__init__(cls, classname, bases, dict_)
def __iter__(cls):
return iter(cls._reg.values())
class EnumType(SchemaType, TypeDecorator):
def __init__(self, enum):
self.enum = enum
self.impl = SQLAEnum(
*enum.values(),
name="ck%s" % re.sub(
"([A-Z])",
lambda m: "_" + m.group(1).lower(),
enum.__name__))
def _set_table(self, table, column):
self.impl._set_table(table, column)
def copy(self):
return EnumType(self.enum)
def process_bind_param(self, value, dialect):
if value is None:
return None
return value.value
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum.from_string(value.strip())
class Enum(object):
"""
Declarative enumeration.
"""
__metaclass__ = EnumMeta
_reg = {}
@classmethod
def from_string(cls, value):
try:
return cls._reg[value]
except KeyError:
raise ValueError(
"Invalid value for %r: %r" %
(cls.__name__, value)
)
@classmethod
def values(cls):
return cls._reg.keys()
@classmethod
def db_type(cls):
return EnumType(cls)
|
Add an Enum type that is in Python as well as SQL
|
Add an Enum type that is in Python as well as SQL
|
Python
|
bsd-2-clause
|
davidfischer/warehouse
|
Add an Enum type that is in Python as well as SQL
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from sqlalchemy.types import SchemaType, TypeDecorator
from sqlalchemy.types import Enum as SQLAEnum
class EnumSymbol(object):
"""
Define a fixed symbol tied to a parent class.
"""
def __init__(self, cls_, name, value, description):
self.cls_ = cls_
self.name = name
self.value = value
self.description = description
def __reduce__(self):
"""
Allow unpickling to return the symbol linked to the Enum class.
"""
return getattr, (self.cls_, self.name)
def __iter__(self):
return iter([self.value, self.description])
def __repr__(self):
return "<%s>" % self.name
class EnumMeta(type):
"""
Generate new Enum classes.
"""
def __init__(cls, classname, bases, dict_):
cls._reg = reg = cls._reg.copy()
for k, v in dict_.items():
if isinstance(v, tuple):
sym = reg[v[0]] = EnumSymbol(cls, k, *v)
setattr(cls, k, sym)
return type.__init__(cls, classname, bases, dict_)
def __iter__(cls):
return iter(cls._reg.values())
class EnumType(SchemaType, TypeDecorator):
def __init__(self, enum):
self.enum = enum
self.impl = SQLAEnum(
*enum.values(),
name="ck%s" % re.sub(
"([A-Z])",
lambda m: "_" + m.group(1).lower(),
enum.__name__))
def _set_table(self, table, column):
self.impl._set_table(table, column)
def copy(self):
return EnumType(self.enum)
def process_bind_param(self, value, dialect):
if value is None:
return None
return value.value
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum.from_string(value.strip())
class Enum(object):
"""
Declarative enumeration.
"""
__metaclass__ = EnumMeta
_reg = {}
@classmethod
def from_string(cls, value):
try:
return cls._reg[value]
except KeyError:
raise ValueError(
"Invalid value for %r: %r" %
(cls.__name__, value)
)
@classmethod
def values(cls):
return cls._reg.keys()
@classmethod
def db_type(cls):
return EnumType(cls)
|
<commit_before><commit_msg>Add an Enum type that is in Python as well as SQL<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from sqlalchemy.types import SchemaType, TypeDecorator
from sqlalchemy.types import Enum as SQLAEnum
class EnumSymbol(object):
"""
Define a fixed symbol tied to a parent class.
"""
def __init__(self, cls_, name, value, description):
self.cls_ = cls_
self.name = name
self.value = value
self.description = description
def __reduce__(self):
"""
Allow unpickling to return the symbol linked to the Enum class.
"""
return getattr, (self.cls_, self.name)
def __iter__(self):
return iter([self.value, self.description])
def __repr__(self):
return "<%s>" % self.name
class EnumMeta(type):
"""
Generate new Enum classes.
"""
def __init__(cls, classname, bases, dict_):
cls._reg = reg = cls._reg.copy()
for k, v in dict_.items():
if isinstance(v, tuple):
sym = reg[v[0]] = EnumSymbol(cls, k, *v)
setattr(cls, k, sym)
return type.__init__(cls, classname, bases, dict_)
def __iter__(cls):
return iter(cls._reg.values())
class EnumType(SchemaType, TypeDecorator):
def __init__(self, enum):
self.enum = enum
self.impl = SQLAEnum(
*enum.values(),
name="ck%s" % re.sub(
"([A-Z])",
lambda m: "_" + m.group(1).lower(),
enum.__name__))
def _set_table(self, table, column):
self.impl._set_table(table, column)
def copy(self):
return EnumType(self.enum)
def process_bind_param(self, value, dialect):
if value is None:
return None
return value.value
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum.from_string(value.strip())
class Enum(object):
"""
Declarative enumeration.
"""
__metaclass__ = EnumMeta
_reg = {}
@classmethod
def from_string(cls, value):
try:
return cls._reg[value]
except KeyError:
raise ValueError(
"Invalid value for %r: %r" %
(cls.__name__, value)
)
@classmethod
def values(cls):
return cls._reg.keys()
@classmethod
def db_type(cls):
return EnumType(cls)
|
Add an Enum type that is in Python as well as SQLfrom __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from sqlalchemy.types import SchemaType, TypeDecorator
from sqlalchemy.types import Enum as SQLAEnum
class EnumSymbol(object):
"""
Define a fixed symbol tied to a parent class.
"""
def __init__(self, cls_, name, value, description):
self.cls_ = cls_
self.name = name
self.value = value
self.description = description
def __reduce__(self):
"""
Allow unpickling to return the symbol linked to the Enum class.
"""
return getattr, (self.cls_, self.name)
def __iter__(self):
return iter([self.value, self.description])
def __repr__(self):
return "<%s>" % self.name
class EnumMeta(type):
"""
Generate new Enum classes.
"""
def __init__(cls, classname, bases, dict_):
cls._reg = reg = cls._reg.copy()
for k, v in dict_.items():
if isinstance(v, tuple):
sym = reg[v[0]] = EnumSymbol(cls, k, *v)
setattr(cls, k, sym)
return type.__init__(cls, classname, bases, dict_)
def __iter__(cls):
return iter(cls._reg.values())
class EnumType(SchemaType, TypeDecorator):
def __init__(self, enum):
self.enum = enum
self.impl = SQLAEnum(
*enum.values(),
name="ck%s" % re.sub(
"([A-Z])",
lambda m: "_" + m.group(1).lower(),
enum.__name__))
def _set_table(self, table, column):
self.impl._set_table(table, column)
def copy(self):
return EnumType(self.enum)
def process_bind_param(self, value, dialect):
if value is None:
return None
return value.value
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum.from_string(value.strip())
class Enum(object):
"""
Declarative enumeration.
"""
__metaclass__ = EnumMeta
_reg = {}
@classmethod
def from_string(cls, value):
try:
return cls._reg[value]
except KeyError:
raise ValueError(
"Invalid value for %r: %r" %
(cls.__name__, value)
)
@classmethod
def values(cls):
return cls._reg.keys()
@classmethod
def db_type(cls):
return EnumType(cls)
|
<commit_before><commit_msg>Add an Enum type that is in Python as well as SQL<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from sqlalchemy.types import SchemaType, TypeDecorator
from sqlalchemy.types import Enum as SQLAEnum
class EnumSymbol(object):
"""
Define a fixed symbol tied to a parent class.
"""
def __init__(self, cls_, name, value, description):
self.cls_ = cls_
self.name = name
self.value = value
self.description = description
def __reduce__(self):
"""
Allow unpickling to return the symbol linked to the Enum class.
"""
return getattr, (self.cls_, self.name)
def __iter__(self):
return iter([self.value, self.description])
def __repr__(self):
return "<%s>" % self.name
class EnumMeta(type):
"""
Generate new Enum classes.
"""
def __init__(cls, classname, bases, dict_):
cls._reg = reg = cls._reg.copy()
for k, v in dict_.items():
if isinstance(v, tuple):
sym = reg[v[0]] = EnumSymbol(cls, k, *v)
setattr(cls, k, sym)
return type.__init__(cls, classname, bases, dict_)
def __iter__(cls):
return iter(cls._reg.values())
class EnumType(SchemaType, TypeDecorator):
def __init__(self, enum):
self.enum = enum
self.impl = SQLAEnum(
*enum.values(),
name="ck%s" % re.sub(
"([A-Z])",
lambda m: "_" + m.group(1).lower(),
enum.__name__))
def _set_table(self, table, column):
self.impl._set_table(table, column)
def copy(self):
return EnumType(self.enum)
def process_bind_param(self, value, dialect):
if value is None:
return None
return value.value
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum.from_string(value.strip())
class Enum(object):
"""
Declarative enumeration.
"""
__metaclass__ = EnumMeta
_reg = {}
@classmethod
def from_string(cls, value):
try:
return cls._reg[value]
except KeyError:
raise ValueError(
"Invalid value for %r: %r" %
(cls.__name__, value)
)
@classmethod
def values(cls):
return cls._reg.keys()
@classmethod
def db_type(cls):
return EnumType(cls)
|
|
e46e512fad9bc92c1725711e2800e44bb699d281
|
deploy/mirrors/greasyfork.py
|
deploy/mirrors/greasyfork.py
|
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
|
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
|
Fix Greasy Fork deploy script.
|
Fix Greasy Fork deploy script.
|
Python
|
bsd-2-clause
|
MNBuyskih/adsbypasser,tablesmit/adsbypasser,xor10/adsbypasser,kehugter/adsbypasser,tosunkaya/adsbypasser
|
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
Fix Greasy Fork deploy script.
|
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
|
<commit_before>from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
<commit_msg>Fix Greasy Fork deploy script.<commit_after>
|
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
|
from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
Fix Greasy Fork deploy script.from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
|
<commit_before>from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=1)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=1)
b['script_version[additional_info]'] = summary.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
<commit_msg>Fix Greasy Fork deploy script.<commit_after>from mechanize import Browser
def exec_(config, summary, script):
USERNAME = config['USERNAME']
PASSWORD = config['PASSWORD']
SCRIPT_ID = config['SCRIPT_ID']
LOGIN_URL = 'https://greasyfork.org/users/sign_in'
EDIT_URL = 'https://greasyfork.org/scripts/{0}/versions/new'.format(SCRIPT_ID)
b = Browser()
# login
b.open(LOGIN_URL)
b.select_form(nr=2)
b['user[email]'] = USERNAME
b['user[password]'] = PASSWORD
b.submit()
# edit source
b.open(EDIT_URL)
b.select_form(nr=2)
b['script_version[additional_info]'] = summary.encode('utf-8')
b['script_version[code]'] = script.encode('utf-8')
b.submit(name='commit')
# ex: ts=4 sts=4 sw=4 et
# sublime: tab_size 4; translate_tabs_to_spaces true; detect_indentation false; use_tab_stops true;
# kate: space-indent on; indent-width 4;
|
4d0725218ad613451917ee34849f646982bcae59
|
tools/metrics/histograms/histogram_ownership.py
|
tools/metrics/histograms/histogram_ownership.py
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple tool to go through histograms.xml and print out the owners for
histograms.
"""
import xml.etree.ElementTree
DUMMY_OWNER = "Please list the metric's owners. Add more owner tags as needed."
def main():
tree = xml.etree.ElementTree.parse('histograms.xml')
root = tree.getroot()
assert root.tag == 'histogram-configuration'
root_children = root.getchildren()
histograms = None
for node in root_children:
if node.tag == 'histograms':
histograms = node
break
assert histograms != None
for histogram in histograms.getchildren():
if histogram.tag != 'histogram':
continue
name = histogram.attrib['name']
owners = []
obsolete = False
for node in histogram.getchildren():
if node.tag == 'obsolete':
obsolete = True
continue
if node.tag != 'owner':
continue
if node.text == DUMMY_OWNER:
continue
assert '@' in node.text
owners.append(node.text)
if not obsolete:
if owners:
print name, ' '.join(owners)
else:
print name, 'NO_OWNER'
if __name__ == '__main__':
main()
|
Add a script to print out histogram ownership.
|
Metrics: Add a script to print out histogram ownership.
NOTRY=true
Review URL: https://codereview.chromium.org/456943002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@288484 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,littlstar/chromium.src
|
Metrics: Add a script to print out histogram ownership.
NOTRY=true
Review URL: https://codereview.chromium.org/456943002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@288484 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple tool to go through histograms.xml and print out the owners for
histograms.
"""
import xml.etree.ElementTree
DUMMY_OWNER = "Please list the metric's owners. Add more owner tags as needed."
def main():
tree = xml.etree.ElementTree.parse('histograms.xml')
root = tree.getroot()
assert root.tag == 'histogram-configuration'
root_children = root.getchildren()
histograms = None
for node in root_children:
if node.tag == 'histograms':
histograms = node
break
assert histograms is not None
for histogram in histograms.getchildren():
if histogram.tag != 'histogram':
continue
name = histogram.attrib['name']
owners = []
obsolete = False
for node in histogram.getchildren():
if node.tag == 'obsolete':
obsolete = True
continue
if node.tag != 'owner':
continue
if node.text == DUMMY_OWNER:
continue
assert '@' in node.text
owners.append(node.text)
if not obsolete:
if owners:
print name, ' '.join(owners)
else:
print name, 'NO_OWNER'
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Metrics: Add a script to print out histogram ownership.
NOTRY=true
Review URL: https://codereview.chromium.org/456943002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@288484 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple tool to go through histograms.xml and print out the owners for
histograms.
"""
import xml.etree.ElementTree
DUMMY_OWNER = "Please list the metric's owners. Add more owner tags as needed."
def main():
tree = xml.etree.ElementTree.parse('histograms.xml')
root = tree.getroot()
assert root.tag == 'histogram-configuration'
root_children = root.getchildren()
histograms = None
for node in root_children:
if node.tag == 'histograms':
histograms = node
break
assert histograms is not None
for histogram in histograms.getchildren():
if histogram.tag != 'histogram':
continue
name = histogram.attrib['name']
owners = []
obsolete = False
for node in histogram.getchildren():
if node.tag == 'obsolete':
obsolete = True
continue
if node.tag != 'owner':
continue
if node.text == DUMMY_OWNER:
continue
assert '@' in node.text
owners.append(node.text)
if not obsolete:
if owners:
print name, ' '.join(owners)
else:
print name, 'NO_OWNER'
if __name__ == '__main__':
main()
|
Metrics: Add a script to print out histogram ownership.
NOTRY=true
Review URL: https://codereview.chromium.org/456943002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@288484 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple tool to go through histograms.xml and print out the owners for
histograms.
"""
import xml.etree.ElementTree
DUMMY_OWNER = "Please list the metric's owners. Add more owner tags as needed."
def main():
tree = xml.etree.ElementTree.parse('histograms.xml')
root = tree.getroot()
assert root.tag == 'histogram-configuration'
root_children = root.getchildren()
histograms = None
for node in root_children:
if node.tag == 'histograms':
histograms = node
break
assert histograms is not None
for histogram in histograms.getchildren():
if histogram.tag != 'histogram':
continue
name = histogram.attrib['name']
owners = []
obsolete = False
for node in histogram.getchildren():
if node.tag == 'obsolete':
obsolete = True
continue
if node.tag != 'owner':
continue
if node.text == DUMMY_OWNER:
continue
assert '@' in node.text
owners.append(node.text)
if not obsolete:
if owners:
print name, ' '.join(owners)
else:
print name, 'NO_OWNER'
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Metrics: Add a script to print out histogram ownership.
NOTRY=true
Review URL: https://codereview.chromium.org/456943002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@288484 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple tool to go through histograms.xml and print out the owners for
histograms.
"""
import xml.etree.ElementTree
DUMMY_OWNER = "Please list the metric's owners. Add more owner tags as needed."
def main():
tree = xml.etree.ElementTree.parse('histograms.xml')
root = tree.getroot()
assert root.tag == 'histogram-configuration'
root_children = root.getchildren()
histograms = None
for node in root_children:
if node.tag == 'histograms':
histograms = node
break
assert histograms is not None
for histogram in histograms.getchildren():
if histogram.tag != 'histogram':
continue
name = histogram.attrib['name']
owners = []
obsolete = False
for node in histogram.getchildren():
if node.tag == 'obsolete':
obsolete = True
continue
if node.tag != 'owner':
continue
if node.text == DUMMY_OWNER:
continue
assert '@' in node.text
owners.append(node.text)
if not obsolete:
if owners:
print name, ' '.join(owners)
else:
print name, 'NO_OWNER'
if __name__ == '__main__':
main()
|
|
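The script above targets Python 2 (print statements, Element.getchildren(), which Python 3.9 removed). A hedged Python 3 sketch of the same traversal, reusing the script's DUMMY_OWNER constant:

```python
# Hypothetical Python 3 port: iterate elements directly instead of the
# removed getchildren(), and call print() as a function.
import xml.etree.ElementTree as ET

def main():
    root = ET.parse('histograms.xml').getroot()
    assert root.tag == 'histogram-configuration'
    histograms = root.find('histograms')
    assert histograms is not None
    for histogram in histograms:
        if histogram.tag != 'histogram':
            continue
        if histogram.find('obsolete') is not None:
            continue  # skip obsolete histograms, as the original does
        owners = [n.text for n in histogram
                  if n.tag == 'owner' and n.text != DUMMY_OWNER]
        print(histogram.attrib['name'], ' '.join(owners) or 'NO_OWNER')
```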
7c95f3ec58751b71d10b96621ec401a9549b3216
|
data_log/migrations/0018_auto_20190909_2004.py
|
data_log/migrations/0018_auto_20190909_2004.py
|
# Generated by Django 2.1.11 on 2019-09-10 03:04
from django.db import migrations
from django.db.models import F
def delete_unowned_raid_drops(apps, schema_editor):
RiftRaidItemDrop = apps.get_model('data_log', 'RiftRaidItemDrop')
RiftRaidMonsterDrop = apps.get_model('data_log', 'RiftRaidMonsterDrop')
RiftRaidRuneCraftDrop = apps.get_model('data_log', 'RiftRaidRuneCraftDrop')
for m in [RiftRaidItemDrop, RiftRaidMonsterDrop, RiftRaidRuneCraftDrop]:
m.objects.exclude(log__wizard_id=F('wizard_id')).delete()
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('data_log', '0017_auto_20190903_1245'),
]
operations = [
migrations.RunPython(delete_unowned_raid_drops, noop)
]
|
Remove drops from raid logs that are not owned by the user submitting the logs
|
Remove drops from raid logs that are not owned by the user submitting the logs
|
Python
|
apache-2.0
|
porksmash/swarfarm,porksmash/swarfarm,PeteAndersen/swarfarm,porksmash/swarfarm,porksmash/swarfarm,PeteAndersen/swarfarm,PeteAndersen/swarfarm,PeteAndersen/swarfarm
|
Remove drops from raid logs that are not owned by the user submitting the logs
|
# Generated by Django 2.1.11 on 2019-09-10 03:04
from django.db import migrations
from django.db.models import F
def delete_unowned_raid_drops(apps, schema_editor):
RiftRaidItemDrop = apps.get_model('data_log', 'RiftRaidItemDrop')
RiftRaidMonsterDrop = apps.get_model('data_log', 'RiftRaidMonsterDrop')
RiftRaidRuneCraftDrop = apps.get_model('data_log', 'RiftRaidRuneCraftDrop')
for m in [RiftRaidItemDrop, RiftRaidMonsterDrop, RiftRaidRuneCraftDrop]:
m.objects.exclude(log__wizard_id=F('wizard_id')).delete()
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('data_log', '0017_auto_20190903_1245'),
]
operations = [
migrations.RunPython(delete_unowned_raid_drops, noop)
]
|
<commit_before><commit_msg>Remove drops from raid logs that are not owned by the user submitting the logs<commit_after>
|
# Generated by Django 2.1.11 on 2019-09-10 03:04
from django.db import migrations
from django.db.models import F
def delete_unowned_raid_drops(apps, schema_editor):
RiftRaidItemDrop = apps.get_model('data_log', 'RiftRaidItemDrop')
RiftRaidMonsterDrop = apps.get_model('data_log', 'RiftRaidMonsterDrop')
RiftRaidRuneCraftDrop = apps.get_model('data_log', 'RiftRaidRuneCraftDrop')
for m in [RiftRaidItemDrop, RiftRaidMonsterDrop, RiftRaidRuneCraftDrop]:
m.objects.exclude(log__wizard_id=F('wizard_id')).delete()
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('data_log', '0017_auto_20190903_1245'),
]
operations = [
migrations.RunPython(delete_unowned_raid_drops, noop)
]
|
Remove drops from raid logs that are not owned by the user submitting the logs# Generated by Django 2.1.11 on 2019-09-10 03:04
from django.db import migrations
from django.db.models import F
def delete_unowned_raid_drops(apps, schema_editor):
RiftRaidItemDrop = apps.get_model('data_log', 'RiftRaidItemDrop')
RiftRaidMonsterDrop = apps.get_model('data_log', 'RiftRaidMonsterDrop')
RiftRaidRuneCraftDrop = apps.get_model('data_log', 'RiftRaidRuneCraftDrop')
for m in [RiftRaidItemDrop, RiftRaidMonsterDrop, RiftRaidRuneCraftDrop]:
m.objects.exclude(log__wizard_id=F('wizard_id')).delete()
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('data_log', '0017_auto_20190903_1245'),
]
operations = [
migrations.RunPython(delete_unowned_raid_drops, noop)
]
|
<commit_before><commit_msg>Remove drops from raid logs that are not owned by the user submitting the logs<commit_after># Generated by Django 2.1.11 on 2019-09-10 03:04
from django.db import migrations
from django.db.models import F
def delete_unowned_raid_drops(apps, schema_editor):
RiftRaidItemDrop = apps.get_model('data_log', 'RiftRaidItemDrop')
RiftRaidMonsterDrop = apps.get_model('data_log', 'RiftRaidMonsterDrop')
RiftRaidRuneCraftDrop = apps.get_model('data_log', 'RiftRaidRuneCraftDrop')
for m in [RiftRaidItemDrop, RiftRaidMonsterDrop, RiftRaidRuneCraftDrop]:
m.objects.exclude(log__wizard_id=F('wizard_id')).delete()
def noop(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('data_log', '0017_auto_20190903_1245'),
]
operations = [
migrations.RunPython(delete_unowned_raid_drops, noop)
]
|
|
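The hand-written noop above keeps the migration reversible. Since Django 1.8 the same pattern can use the built-in no-op callable; a minimal sketch reusing the forward function from this migration:

```python
# Same reversible pattern with Django's built-in no-op reverse callable.
from django.db import migrations

operations = [
    migrations.RunPython(delete_unowned_raid_drops,
                         migrations.RunPython.noop),
]
```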
56396f980236f6d909f63d7faaddd357f5fe235b
|
stock_quant_merge/models/stock.py
|
stock_quant_merge/models/stock.py
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
Use browse record instead of ids
|
[MOD] Use browse record instead of ids
|
Python
|
agpl-3.0
|
InakiZabala/odoomrp-wip,Eficent/odoomrp-wip,diagramsoftware/odoomrp-wip,jobiols/odoomrp-wip,Antiun/odoomrp-wip,factorlibre/odoomrp-wip,raycarnes/odoomrp-wip,Daniel-CA/odoomrp-wip-public,esthermm/odoomrp-wip,odoomrp/odoomrp-wip,Daniel-CA/odoomrp-wip-public,odoomrp/odoomrp-wip,oihane/odoomrp-wip,jobiols/odoomrp-wip,odoocn/odoomrp-wip,michaeljohn32/odoomrp-wip,alhashash/odoomrp-wip,esthermm/odoomrp-wip,agaldona/odoomrp-wip-1,Endika/odoomrp-wip,ddico/odoomrp-wip,oihane/odoomrp-wip,jorsea/odoomrp-wip,Eficent/odoomrp-wip,maljac/odoomrp-wip,xpansa/odoomrp-wip,diagramsoftware/odoomrp-wip,sergiocorato/odoomrp-wip,sergiocorato/odoomrp-wip,windedge/odoomrp-wip,agaldona/odoomrp-wip-1,alfredoavanzosc/odoomrp-wip-1,dvitme/odoomrp-wip,slevenhagen/odoomrp-wip-npg,factorlibre/odoomrp-wip,invitu/odoomrp-wip
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
[MOD] Use browse record instead of ids
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
<commit_msg>[MOD] Use browse record instead of ids<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
[MOD] Use browse record instead of ids# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
<commit_msg>[MOD] Use browse record instead of ids<commit_after># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
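The diff above swaps a plain list of integer ids for recordset arithmetic; self.filtered(lambda x: True) acts as a cheap copy of the recordset, after which membership tests and -= work on records directly. A hedged sketch of the idiom (records and rec are placeholder recordsets, not names from this module):

```python
# Old style: juggle raw ids in a Python list.
pending_ids = records.ids
pending_ids.remove(rec.id)

# New API style: keep a recordset and use set-like arithmetic.
pending = records.filtered(lambda r: True)  # shallow copy of the recordset
if rec in pending:                          # membership test on records
    pending -= rec                          # removal without touching ids
```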
8ab1e018319fc7fc3837f1d8d1dd59a0dc3f2eb5
|
tests/compiler/test_conditional_compilation.py
|
tests/compiler/test_conditional_compilation.py
|
from tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump
PREFIX = [
OpcodePushStatic(STATIC_START),
OpcodePushStatic(STATIC_START + 1),
internal_call('text.__equals__'),
]
def test_simple_conditional():
assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
OpcodeJumpConditional(26),
OpcodePushStatic(STATIC_START + 2),
internal_call('Console.write')
]
def test_empty_conditional():
assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
OpcodeJumpConditional(24)
]
|
Add test for conditional compilation
|
Add test for conditional compilation
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
Add test for conditional compilation
|
from tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump
PREFIX = [
OpcodePushStatic(STATIC_START),
OpcodePushStatic(STATIC_START + 1),
internal_call('text.__equals__'),
]
def test_simple_conditional():
assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
OpcodeJumpConditional(26),
OpcodePushStatic(STATIC_START + 2),
internal_call('Console.write')
]
def test_empty_conditional():
assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
OpcodeJumpConditional(24)
]
|
<commit_before><commit_msg>Add test for conditional compilation<commit_after>
|
from tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump
PREFIX = [
OpcodePushStatic(STATIC_START),
OpcodePushStatic(STATIC_START + 1),
internal_call('text.__equals__'),
]
def test_simple_conditional():
assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
OpcodeJumpConditional(26),
OpcodePushStatic(STATIC_START + 2),
internal_call('Console.write')
]
def test_empty_conditional():
assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
OpcodeJumpConditional(24)
]
|
Add test for conditional compilationfrom tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump
PREFIX = [
OpcodePushStatic(STATIC_START),
OpcodePushStatic(STATIC_START + 1),
internal_call('text.__equals__'),
]
def test_simple_conditional():
assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
OpcodeJumpConditional(26),
OpcodePushStatic(STATIC_START + 2),
internal_call('Console.write')
]
def test_empty_conditional():
assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
OpcodeJumpConditional(24)
]
|
<commit_before><commit_msg>Add test for conditional compilation<commit_after>from tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump
PREFIX = [
OpcodePushStatic(STATIC_START),
OpcodePushStatic(STATIC_START + 1),
internal_call('text.__equals__'),
]
def test_simple_conditional():
assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
OpcodeJumpConditional(26),
OpcodePushStatic(STATIC_START + 2),
internal_call('Console.write')
]
def test_empty_conditional():
assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
OpcodeJumpConditional(24)
]
|
|
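Both tests share PREFIX and differ only in the conditional's body and the jump target. A hypothetical pytest parametrization of the same two cases, reusing compile_snippet, PREFIX, STATIC_START and internal_call from this module (the targets 26 and 24 are copied from the assertions above):

```python
import pytest

@pytest.mark.parametrize('body, suffix', [
    (['Console.write("executing")'],
     [OpcodeJumpConditional(26),
      OpcodePushStatic(STATIC_START + 2),
      internal_call('Console.write')]),
    (['pass'],
     [OpcodeJumpConditional(24)]),
])
def test_conditional(body, suffix):
    assert compile_snippet({'if "dog" == "dog"': body}) == PREFIX + suffix
```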
60dd5e0c333b4631db392745ddcdab23b95f4da0
|
tensor2tensor/data_generators/genetics_test.py
|
tensor2tensor/data_generators/genetics_test.py
|
# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Genetics problems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.data_generators import genetics
import tensorflow as tf
class GeneticsTest(tf.test.TestCase):
def _oneHotBases(self, bases):
one_hots = []
for base_id in bases:
one_hot = [False] * 4
if base_id < 4:
one_hot[base_id] = True
one_hots.append(one_hot)
return np.array(one_hots)
def testRecordToExample(self):
inputs = self._oneHotBases([0, 1, 3, 4, 1, 0])
mask = np.array([True, False, True])
outputs = np.array([[1.0, 2.0, 3.0], [5.0, 1.0, 0.2], [5.1, 2.3, 2.3]])
ex_dict = genetics.to_example_dict(inputs, mask, outputs)
self.assertAllEqual([2, 3, 5, 6, 3, 2, 1], ex_dict["inputs"])
self.assertAllEqual([1.0, 0.0, 1.0], ex_dict["targets_mask"])
self.assertAllEqual([1.0, 2.0, 3.0, 5.0, 1.0, 0.2, 5.1, 2.3, 2.3],
ex_dict["targets"])
self.assertAllEqual([3, 3], ex_dict["targets_shape"])
def testGenerateShardArgs(self):
num_examples = 37
num_shards = 4
outfiles = [str(i) for i in range(num_shards)]
shard_args = genetics.generate_shard_args(outfiles, num_examples)
starts, ends, fnames = zip(*shard_args)
self.assertAllEqual([0, 9, 18, 27], starts)
self.assertAllEqual([9, 18, 27, 37], ends)
self.assertAllEqual(fnames, outfiles)
if __name__ == "__main__":
tf.test.main()
|
Add tests for genetics problems
|
Add tests for genetics problems
PiperOrigin-RevId: 162569505
|
Python
|
apache-2.0
|
waterblue13/tensor2tensor,rsepassi/tensor2tensor,waterblue13/tensor2tensor,tensorflow/tensor2tensor,tensorflow/tensor2tensor,tensorflow/tensor2tensor,vthorsteinsson/tensor2tensor,rsepassi/tensor2tensor,tensorflow/tensor2tensor,waterblue13/tensor2tensor,vthorsteinsson/tensor2tensor,rsepassi/tensor2tensor,vthorsteinsson/tensor2tensor,rsepassi/tensor2tensor,vthorsteinsson/tensor2tensor,tensorflow/tensor2tensor
|
Add tests for genetics problems
PiperOrigin-RevId: 162569505
|
# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Genetics problems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.data_generators import genetics
import tensorflow as tf
class GeneticsTest(tf.test.TestCase):
def _oneHotBases(self, bases):
one_hots = []
for base_id in bases:
one_hot = [False] * 4
if base_id < 4:
one_hot[base_id] = True
one_hots.append(one_hot)
return np.array(one_hots)
def testRecordToExample(self):
inputs = self._oneHotBases([0, 1, 3, 4, 1, 0])
mask = np.array([True, False, True])
outputs = np.array([[1.0, 2.0, 3.0], [5.0, 1.0, 0.2], [5.1, 2.3, 2.3]])
ex_dict = genetics.to_example_dict(inputs, mask, outputs)
self.assertAllEqual([2, 3, 5, 6, 3, 2, 1], ex_dict["inputs"])
self.assertAllEqual([1.0, 0.0, 1.0], ex_dict["targets_mask"])
self.assertAllEqual([1.0, 2.0, 3.0, 5.0, 1.0, 0.2, 5.1, 2.3, 2.3],
ex_dict["targets"])
self.assertAllEqual([3, 3], ex_dict["targets_shape"])
def testGenerateShardArgs(self):
num_examples = 37
num_shards = 4
outfiles = [str(i) for i in range(num_shards)]
shard_args = genetics.generate_shard_args(outfiles, num_examples)
starts, ends, fnames = zip(*shard_args)
self.assertAllEqual([0, 9, 18, 27], starts)
self.assertAllEqual([9, 18, 27, 37], ends)
self.assertAllEqual(fnames, outfiles)
if __name__ == "__main__":
tf.test.main()
|
<commit_before><commit_msg>Add tests for genetics problems
PiperOrigin-RevId: 162569505<commit_after>
|
# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Genetics problems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.data_generators import genetics
import tensorflow as tf
class GeneticsTest(tf.test.TestCase):
def _oneHotBases(self, bases):
one_hots = []
for base_id in bases:
one_hot = [False] * 4
if base_id < 4:
one_hot[base_id] = True
one_hots.append(one_hot)
return np.array(one_hots)
def testRecordToExample(self):
inputs = self._oneHotBases([0, 1, 3, 4, 1, 0])
mask = np.array([True, False, True])
outputs = np.array([[1.0, 2.0, 3.0], [5.0, 1.0, 0.2], [5.1, 2.3, 2.3]])
ex_dict = genetics.to_example_dict(inputs, mask, outputs)
self.assertAllEqual([2, 3, 5, 6, 3, 2, 1], ex_dict["inputs"])
self.assertAllEqual([1.0, 0.0, 1.0], ex_dict["targets_mask"])
self.assertAllEqual([1.0, 2.0, 3.0, 5.0, 1.0, 0.2, 5.1, 2.3, 2.3],
ex_dict["targets"])
self.assertAllEqual([3, 3], ex_dict["targets_shape"])
def testGenerateShardArgs(self):
num_examples = 37
num_shards = 4
outfiles = [str(i) for i in range(num_shards)]
shard_args = genetics.generate_shard_args(outfiles, num_examples)
starts, ends, fnames = zip(*shard_args)
self.assertAllEqual([0, 9, 18, 27], starts)
self.assertAllEqual([9, 18, 27, 37], ends)
self.assertAllEqual(fnames, outfiles)
if __name__ == "__main__":
tf.test.main()
|
Add tests for genetics problems
PiperOrigin-RevId: 162569505# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Genetics problems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.data_generators import genetics
import tensorflow as tf
class GeneticsTest(tf.test.TestCase):
def _oneHotBases(self, bases):
one_hots = []
for base_id in bases:
one_hot = [False] * 4
if base_id < 4:
one_hot[base_id] = True
one_hots.append(one_hot)
return np.array(one_hots)
def testRecordToExample(self):
inputs = self._oneHotBases([0, 1, 3, 4, 1, 0])
mask = np.array([True, False, True])
outputs = np.array([[1.0, 2.0, 3.0], [5.0, 1.0, 0.2], [5.1, 2.3, 2.3]])
ex_dict = genetics.to_example_dict(inputs, mask, outputs)
self.assertAllEqual([2, 3, 5, 6, 3, 2, 1], ex_dict["inputs"])
self.assertAllEqual([1.0, 0.0, 1.0], ex_dict["targets_mask"])
self.assertAllEqual([1.0, 2.0, 3.0, 5.0, 1.0, 0.2, 5.1, 2.3, 2.3],
ex_dict["targets"])
self.assertAllEqual([3, 3], ex_dict["targets_shape"])
def testGenerateShardArgs(self):
num_examples = 37
num_shards = 4
outfiles = [str(i) for i in range(num_shards)]
shard_args = genetics.generate_shard_args(outfiles, num_examples)
starts, ends, fnames = zip(*shard_args)
self.assertAllEqual([0, 9, 18, 27], starts)
self.assertAllEqual([9, 18, 27, 37], ends)
self.assertAllEqual(fnames, outfiles)
if __name__ == "__main__":
tf.test.main()
|
<commit_before><commit_msg>Add tests for genetics problems
PiperOrigin-RevId: 162569505<commit_after># Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Genetics problems."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.data_generators import genetics
import tensorflow as tf
class GeneticsTest(tf.test.TestCase):
def _oneHotBases(self, bases):
one_hots = []
for base_id in bases:
one_hot = [False] * 4
if base_id < 4:
one_hot[base_id] = True
one_hots.append(one_hot)
return np.array(one_hots)
def testRecordToExample(self):
inputs = self._oneHotBases([0, 1, 3, 4, 1, 0])
mask = np.array([True, False, True])
outputs = np.array([[1.0, 2.0, 3.0], [5.0, 1.0, 0.2], [5.1, 2.3, 2.3]])
ex_dict = genetics.to_example_dict(inputs, mask, outputs)
self.assertAllEqual([2, 3, 5, 6, 3, 2, 1], ex_dict["inputs"])
self.assertAllEqual([1.0, 0.0, 1.0], ex_dict["targets_mask"])
self.assertAllEqual([1.0, 2.0, 3.0, 5.0, 1.0, 0.2, 5.1, 2.3, 2.3],
ex_dict["targets"])
self.assertAllEqual([3, 3], ex_dict["targets_shape"])
def testGenerateShardArgs(self):
num_examples = 37
num_shards = 4
outfiles = [str(i) for i in range(num_shards)]
shard_args = genetics.generate_shard_args(outfiles, num_examples)
starts, ends, fnames = zip(*shard_args)
self.assertAllEqual([0, 9, 18, 27], starts)
self.assertAllEqual([9, 18, 27, 37], ends)
self.assertAllEqual(fnames, outfiles)
if __name__ == "__main__":
tf.test.main()
|
|
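_oneHotBases builds one-hot rows in a Python loop, mapping any base id of 4 or more to an all-False row. A vectorized numpy sketch of the same encoding (the function name is mine, not from tensor2tensor):

```python
import numpy as np

def one_hot_bases(bases):
    # Ids 0-3 become a one-hot row; anything >= 4 stays all-False.
    bases = np.asarray(bases)
    out = np.zeros((len(bases), 4), dtype=bool)
    valid = bases < 4
    out[valid] = np.eye(4, dtype=bool)[bases[valid]]
    return out

assert (one_hot_bases([0, 1, 3, 4]) ==
        np.array([[1, 0, 0, 0],
                  [0, 1, 0, 0],
                  [0, 0, 0, 1],
                  [0, 0, 0, 0]], dtype=bool)).all()
```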
7ebd68fab54ae697bd3da04704102ed77b387460
|
testapp/testapp/testmain/tests/test_parsers.py
|
testapp/testapp/testmain/tests/test_parsers.py
|
from datetime import date, datetime
import pytz
from django.test import TestCase
from django_afip import parsers
class ParseDatetimeTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_datetime('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_datetime(None), None)
def test_parse_datetimes(self):
tz = pytz.timezone(pytz.country_timezones['ar'][0])
self.assertEqual(
parsers.parse_datetime('20170730154330'),
datetime(2017, 7, 30, 15, 43, 30, tzinfo=tz),
)
class ParseDateTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_date('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_date(None), None)
def test_parse_dates(self):
self.assertEqual(
parsers.parse_date('20170730'),
date(2017, 7, 30),
)
|
Add tests for parse_date and parse_datetime
|
Add tests for parse_date and parse_datetime
|
Python
|
isc
|
hobarrera/django-afip,hobarrera/django-afip
|
Add tests for parse_date and parse_datetime
|
from datetime import date, datetime
import pytz
from django.test import TestCase
from django_afip import parsers
class ParseDatetimeTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_datetime('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_datetime(None), None)
def test_parse_datetimes(self):
tz = pytz.timezone(pytz.country_timezones['ar'][0])
self.assertEqual(
parsers.parse_datetime('20170730154330'),
datetime(2017, 7, 30, 15, 43, 30, tzinfo=tz),
)
class ParseDateTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_date('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_date(None), None)
def test_parse_dates(self):
self.assertEqual(
parsers.parse_date('20170730'),
date(2017, 7, 30),
)
|
<commit_before><commit_msg>Add tests for parse_date and parse_datetime<commit_after>
|
from datetime import date, datetime
import pytz
from django.test import TestCase
from django_afip import parsers
class ParseDatetimeTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_datetime('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_datetime(None), None)
def test_parse_datetimes(self):
tz = pytz.timezone(pytz.country_timezones['ar'][0])
self.assertEqual(
parsers.parse_datetime('20170730154330'),
datetime(2017, 7, 30, 15, 43, 30, tzinfo=tz),
)
class ParseDateTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_date('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_date(None), None)
def test_parse_dates(self):
self.assertEqual(
parsers.parse_date('20170730'),
date(2017, 7, 30),
)
|
Add tests for parse_date and parse_datetimefrom datetime import date, datetime
import pytz
from django.test import TestCase
from django_afip import parsers
class ParseDatetimeTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_datetime('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_datetime(None), None)
def test_parse_datetimes(self):
tz = pytz.timezone(pytz.country_timezones['ar'][0])
self.assertEqual(
parsers.parse_datetime('20170730154330'),
datetime(2017, 7, 30, 15, 43, 30, tzinfo=tz),
)
class ParseDateTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_date('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_date(None), None)
def test_parse_dates(self):
self.assertEqual(
parsers.parse_date('20170730'),
date(2017, 7, 30),
)
|
<commit_before><commit_msg>Add tests for parse_date and parse_datetime<commit_after>from datetime import date, datetime
import pytz
from django.test import TestCase
from django_afip import parsers
class ParseDatetimeTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_datetime('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_datetime(None), None)
def test_parse_datetimes(self):
tz = pytz.timezone(pytz.country_timezones['ar'][0])
self.assertEqual(
parsers.parse_datetime('20170730154330'),
datetime(2017, 7, 30, 15, 43, 30, tzinfo=tz),
)
class ParseDateTestCase(TestCase):
def test_parse_null(self):
self.assertEqual(parsers.parse_date('NULL'), None)
def test_parse_none(self):
self.assertEqual(parsers.parse_date(None), None)
def test_parse_dates(self):
self.assertEqual(
parsers.parse_date('20170730'),
date(2017, 7, 30),
)
|
|
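The parsers under test are not shown in this record. A minimal hypothetical implementation consistent with the assertions; the format strings and the 'NULL'/None handling are inferred from the tests, not taken from django-afip:

```python
from datetime import datetime

import pytz

TZ_AR = pytz.timezone(pytz.country_timezones['ar'][0])

def parse_datetime(value):
    if value in (None, 'NULL'):
        return None
    return datetime.strptime(value, '%Y%m%d%H%M%S').replace(tzinfo=TZ_AR)

def parse_date(value):
    if value in (None, 'NULL'):
        return None
    return datetime.strptime(value, '%Y%m%d').date()
```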
790230b4a1ad5f2550def7762c723abbd69d743d
|
data/migrations/0002_auto_20170413_1553.py
|
data/migrations/0002_auto_20170413_1553.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-13 13:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='visitlog',
name='path',
field=models.CharField(max_length=2500),
),
]
|
Increase limit on data log model for larger urls
|
Increase limit on data log model for larger urls
|
Python
|
agpl-3.0
|
MTG/dunya,MTG/dunya,MTG/dunya,MTG/dunya
|
Increase limit on data log model for larger urls
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-13 13:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='visitlog',
name='path',
field=models.CharField(max_length=2500),
),
]
|
<commit_before><commit_msg>Increase limit on data log model for larger urls<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-13 13:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='visitlog',
name='path',
field=models.CharField(max_length=2500),
),
]
|
Increase limit on data log model for larger urls# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-13 13:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='visitlog',
name='path',
field=models.CharField(max_length=2500),
),
]
|
<commit_before><commit_msg>Increase limit on data log model for larger urls<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-13 13:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='visitlog',
name='path',
field=models.CharField(max_length=2500),
),
]
|
|
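A migration like this is normally generated rather than hand-written: widen the field on the model and let makemigrations emit the AlterField. A sketch, with the model shape inferred from the operation above:

```python
# Hypothetical model-side change that produces the AlterField above;
# afterwards run: python manage.py makemigrations data
from django.db import models

class VisitLog(models.Model):
    path = models.CharField(max_length=2500)  # raised to fit longer URLs
```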
f5ce04adcce0de23fb156e8dc410379a29467a10
|
api-reader/smartvel_tests.py
|
api-reader/smartvel_tests.py
|
from threading import Thread
from rx import Observable
from APIReaderSmartvel import APIReaderSmartvel
events = Observable.from_(APIReaderSmartvel().get_iterable())
# Verify that all the following regions have events
REGIONS = (
'Barcelona',
'Málaga',
'Palma de Mallorca'
)
# Filters
def has_place(element):
return 'place' in element['event']
is_in_region = { region: lambda element, region=region: element['event']['place']['region']['name'] == region for region in REGIONS }
# Test
def is_not_empty(a_stream):
a_stream.is_empty().subscribe(fail_if_empty)
# Test helper (just a console reporter)
def fail_if_empty(empty):
if empty:
print('stream should not be empty!')
else:
print('good, stream is not empty')
# Launch the test
threads = [ Thread(target=is_not_empty, \
args=(events.filter(has_place).filter(is_in_region[region]), )) \
for region in REGIONS ]
for thread in threads:
thread.start()
|
Implement one of Smartvel's tests using APIReaderSmartvel.
|
Implement one of Smartvel's tests using APIReaderSmartvel.
|
Python
|
mit
|
Pysellus/streaming-api-test,Pysellus/streaming-api-test
|
Implement one of Smartvel's tests using APIReaderSmartvel.
|
from threading import Thread
from rx import Observable
from APIReaderSmartvel import APIReaderSmartvel
events = Observable.from_(APIReaderSmartvel().get_iterable())
# Verify that all the following regions have events
REGIONS = (
'Barcelona',
'Málaga',
'Palma de Mallorca'
)
# Filters
def has_place(element):
return 'place' in element['event']
is_in_region = { region: lambda element, region=region: element['event']['place']['region']['name'] == region for region in REGIONS }
# Test
def is_not_empty(a_stream):
a_stream.is_empty().subscribe(fail_if_empty)
# Test helper (just a console reporter)
def fail_if_empty(empty):
if empty:
print('stream should not be empty!')
else:
print('good, stream is not empty')
# Launch the test
threads = [ Thread(target=is_not_empty, \
args=(events.filter(has_place).filter(is_in_region[region]), )) \
for region in REGIONS ]
for thread in threads:
thread.start()
|
<commit_before><commit_msg>Implement one of Smartvel's tests using APIReaderSmartvel.<commit_after>
|
from threading import Thread
from rx import Observable
from APIReaderSmartvel import APIReaderSmartvel
events = Observable.from_(APIReaderSmartvel().get_iterable())
# Verify that all the following regions have events
REGIONS = (
'Barcelona',
'Málaga',
'Palma de Mallorca'
)
# Filters
def has_place(element):
return 'place' in element['event']
is_in_region = { region: lambda element, region=region: element['event']['place']['region']['name'] == region for region in REGIONS }
# Test
def is_not_empty(a_stream):
a_stream.is_empty().subscribe(fail_if_empty)
# Test helper (just a console reporter)
def fail_if_empty(empty):
if empty:
print('stream should not be empty!')
else:
print('good, stream is not empty')
# Launch the test
threads = [ Thread(target=is_not_empty, \
args=(events.filter(has_place).filter(is_in_region[region]), )) \
for region in REGIONS ]
for thread in threads:
thread.start()
|
Implement one of Smartvel's tests using APIReaderSmartvel.from threading import Thread
from rx import Observable
from APIReaderSmartvel import APIReaderSmartvel
events = Observable.from_(APIReaderSmartvel().get_iterable())
# Verify that all the following regions have events
REGIONS = (
'Barcelona',
'Málaga',
'Palma de Mallorca'
)
# Filters
def has_place(element):
return 'place' in element['event']
is_in_region = { region: lambda element, region=region: element['event']['place']['region']['name'] == region for region in REGIONS }
# Test
def is_not_empty(a_stream):
a_stream.is_empty().subscribe(fail_if_empty)
# Test helper (just a console reporter)
def fail_if_empty(empty):
if empty:
print('stream should not be empty!')
else:
print('good, stream is not empty')
# Launch the test
threads = [ Thread(target=is_not_empty, \
args=(events.filter(has_place).filter(is_in_region[region]), )) \
for region in REGIONS ]
for thread in threads:
thread.start()
|
<commit_before><commit_msg>Implement one of Smartvel's tests using APIReaderSmartvel.<commit_after>from threading import Thread
from rx import Observable
from APIReaderSmartvel import APIReaderSmartvel
events = Observable.from_(APIReaderSmartvel().get_iterable())
# Verify that all the following regions have events
REGIONS = (
'Barcelona',
'Málaga',
'Palma de Mallorca'
)
# Filters
def has_place(element):
return 'place' in element['event']
is_in_region = { region: lambda element, region=region: element['event']['place']['region']['name'] == region for region in REGIONS }
# Test
def is_not_empty(a_stream):
a_stream.is_empty().subscribe(fail_if_empty)
# Test helper (just a console reporter)
def fail_if_empty(empty):
if empty:
print('stream should not be empty!')
else:
print('good, stream is not empty')
# Launch the test
threads = [ Thread(target=is_not_empty, \
args=(events.filter(has_place).filter(is_in_region[region]), )) \
for region in REGIONS ]
for thread in threads:
thread.start()
|
|
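The region=region default in the comprehension above matters: a plain lambda element: ... would close over the comprehension's loop variable, so every filter would compare against the last region in REGIONS. A minimal demonstration of the pitfall:

```python
# Late binding: both lambdas in fs_buggy see r after the loop finished.
fs_buggy = {r: (lambda e: e == r) for r in ('a', 'b')}
fs_fixed = {r: (lambda e, r=r: e == r) for r in ('a', 'b')}

assert fs_buggy['a']('a') is False  # compares against 'b', the last value
assert fs_fixed['a']('a') is True   # default argument froze r per iteration
```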
16c95be97c55e1c678d7b47f401fbe499faaccf7
|
all-domains/tutorials/cracking-the-coding-interview/hash-tables-ransom-note/solution.py
|
all-domains/tutorials/cracking-the-coding-interview/hash-tables-ransom-note/solution.py
|
# https://www.hackerrank.com/challenges/ctci-ransom-note
# Python 3
def create_word_dictionary(word_list):
word_dictionary = {}
for word in word_list:
if word not in word_dictionary:
word_dictionary[word] = 1
else:
word_dictionary[word] += 1
return word_dictionary
def ransom_note(magazine, ransom):
word_dictionary = create_word_dictionary(magazine)
for word in ransom:
if word not in word_dictionary:
return False
word_dictionary[word] -= 1
if word_dictionary[word] < 0:
return False
return True
m, n = map(int, input().strip().split(' '))
magazine = input().strip().split(' ')
ransom = input().strip().split(' ')
# Expected Output "No"
# m = 15
# n = 17
# magazine = 'o l x imjaw bee khmla v o v o imjaw l khmla imjaw x'.split(' ')
# ransom = 'imjaw l khmla x imjaw o l l o khmla v bee o o imjaw imjaw o'.split(' ')
answer = ransom_note(magazine, ransom)
if answer:
print("Yes")
else:
print("No")
|
Implement code to determine if a ransom note can be constructed from a magazine
|
Implement code to determine if a ransom note can
be constructed from a magazine
https://www.hackerrank.com/challenges/ctci-ransom-note
|
Python
|
mit
|
arvinsim/hackerrank-solutions
|
Implement code to determine if a ransom note can
be constructed from a magazine
https://www.hackerrank.com/challenges/ctci-ransom-note
|
# https://www.hackerrank.com/challenges/ctci-ransom-note
# Python 3
def create_word_dictionary(word_list):
word_dictionary = {}
for word in word_list:
if word not in word_dictionary:
word_dictionary[word] = 1
else:
word_dictionary[word] += 1
return word_dictionary
def ransom_note(magazine, ransom):
word_dictionary = create_word_dictionary(magazine)
for word in ransom:
if word not in word_dictionary:
return False
word_dictionary[word] -= 1
if word_dictionary[word] < 0:
return False
return True
m, n = map(int, input().strip().split(' '))
magazine = input().strip().split(' ')
ransom = input().strip().split(' ')
# Expected Output "No"
# m = 15
# n = 17
# magazine = 'o l x imjaw bee khmla v o v o imjaw l khmla imjaw x'.split(' ')
# ransom = 'imjaw l khmla x imjaw o l l o khmla v bee o o imjaw imjaw o'.split(' ')
answer = ransom_note(magazine, ransom)
if answer:
print("Yes")
else:
print("No")
|
<commit_before><commit_msg>Implement code to determine if a ransom note can
be constructed from a magazine
https://www.hackerrank.com/challenges/ctci-ransom-note<commit_after>
|
# https://www.hackerrank.com/challenges/ctci-ransom-note
# Python 3
def create_word_dictionary(word_list):
word_dictionary = {}
for word in word_list:
if word not in word_dictionary:
word_dictionary[word] = 1
else:
word_dictionary[word] += 1
return word_dictionary
def ransom_note(magazine, ransom):
word_dictionary = create_word_dictionary(magazine)
for word in ransom:
if word not in word_dictionary:
return False
word_dictionary[word] -= 1
if word_dictionary[word] < 0:
return False
return True
m, n = map(int, input().strip().split(' '))
magazine = input().strip().split(' ')
ransom = input().strip().split(' ')
# Expected Output "No"
# m = 15
# n = 17
# magazine = 'o l x imjaw bee khmla v o v o imjaw l khmla imjaw x'.split(' ')
# ransom = 'imjaw l khmla x imjaw o l l o khmla v bee o o imjaw imjaw o'.split(' ')
answer = ransom_note(magazine, ransom)
if answer:
print("Yes")
else:
print("No")
|
Implement code to determine if a ransom note can
be constructed from a magazine
https://www.hackerrank.com/challenges/ctci-ransom-note# https://www.hackerrank.com/challenges/ctci-ransom-note
# Python 3
def create_word_dictionary(word_list):
word_dictionary = {}
for word in word_list:
if word not in word_dictionary:
word_dictionary[word] = 1
else:
word_dictionary[word] += 1
return word_dictionary
def ransom_note(magazine, ransom):
word_dictionary = create_word_dictionary(magazine)
for word in ransom:
if word not in word_dictionary:
return False
word_dictionary[word] -= 1
if word_dictionary[word] < 0:
return False
return True
m, n = map(int, input().strip().split(' '))
magazine = input().strip().split(' ')
ransom = input().strip().split(' ')
# Expected Output "No"
# m = 15
# n = 17
# magazine = 'o l x imjaw bee khmla v o v o imjaw l khmla imjaw x'.split(' ')
# ransom = 'imjaw l khmla x imjaw o l l o khmla v bee o o imjaw imjaw o'.split(' ')
answer = ransom_note(magazine, ransom)
if answer:
print("Yes")
else:
print("No")
|
<commit_before><commit_msg>Implement code to determine if a ransom note can
be constructed from a magazine
https://www.hackerrank.com/challenges/ctci-ransom-note<commit_after># https://www.hackerrank.com/challenges/ctci-ransom-note
# Python 3
def create_word_dictionary(word_list):
word_dictionary = {}
for word in word_list:
if word not in word_dictionary:
word_dictionary[word] = 1
else:
word_dictionary[word] += 1
return word_dictionary
def ransom_note(magazine, ransom):
word_dictionary = create_word_dictionary(magazine)
for word in ransom:
if word not in word_dictionary:
return False
word_dictionary[word] -= 1
if word_dictionary[word] < 0:
return False
return True
m, n = map(int, input().strip().split(' '))
magazine = input().strip().split(' ')
ransom = input().strip().split(' ')
# Expected Output "No"
# m = 15
# n = 17
# magazine = 'o l x imjaw bee khmla v o v o imjaw l khmla imjaw x'.split(' ')
# ransom = 'imjaw l khmla x imjaw o l l o khmla v bee o o imjaw imjaw o'.split(' ')
answer = ransom_note(magazine, ransom)
if answer:
print("Yes")
else:
print("No")
|
|
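The dictionary-counting approach above can also be expressed with collections.Counter: subtraction keeps only positive counts, so an empty remainder means every ransom word is covered. A sketch:

```python
from collections import Counter

def ransom_note_counter(magazine, ransom):
    # Whatever the magazine cannot cover survives the subtraction.
    return not (Counter(ransom) - Counter(magazine))
```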
f553451358eca914afa45abb6e6a3a02669df6ec
|
unicornclient/mock/explorerhat.py
|
unicornclient/mock/explorerhat.py
|
# pylint: disable=W1201,C0103
import logging
class _Motor:
def __init__(self, name=None):
self.name = name
if not name:
self.name = 'all'
self.one = _Motor('one')
self.two = _Motor('two')
def invert(self):
logging.debug(self.name + ' invert')
def forwards(self, speed=100):
logging.debug(self.name + ' forwards ' + str(speed))
def backwards(self, speed=100):
logging.debug(self.name + ' backwards ' + str(speed))
def speed(self, speed):
logging.debug(self.name + ' speed ' + str(speed))
def stop(self):
logging.debug(self.name + ' stop')
motor = _Motor()
|
Add explorer phat motor functions mock
|
Add explorer phat motor functions mock
|
Python
|
mit
|
amm0nite/unicornclient,amm0nite/unicornclient
|
Add explorer phat motor functions mock
|
# pylint: disable=W1201,C0103
import logging
class _Motor:
def __init__(self, name=None):
self.name = name
if not name:
self.name = 'all'
self.one = _Motor('one')
self.two = _Motor('two')
def invert(self):
logging.debug(self.name + ' invert')
def forwards(self, speed=100):
logging.debug(self.name + ' forwards ' + str(speed))
def backwards(self, speed=100):
logging.debug(self.name + ' backwards ' + str(speed))
def speed(self, speed):
logging.debug(self.name + ' speed ' + str(speed))
def stop(self):
logging.debug(self.name + ' stop')
motor = _Motor()
|
<commit_before><commit_msg>Add explorer phat motor functions mock<commit_after>
|
# pylint: disable=W1201,C0103
import logging
class _Motor:
def __init__(self, name=None):
self.name = name
if not name:
self.name = 'all'
self.one = _Motor('one')
self.two = _Motor('two')
def invert(self):
logging.debug(self.name + ' invert')
def forwards(self, speed=100):
logging.debug(self.name + ' forwards ' + str(speed))
def backwards(self, speed=100):
logging.debug(self.name + ' backwards ' + str(speed))
def speed(self, speed):
logging.debug(self.name + ' speed ' + str(speed))
def stop(self):
logging.debug(self.name + ' stop')
motor = _Motor()
|
Add explorer phat motor functions mock# pylint: disable=W1201,C0103
import logging
class _Motor:
def __init__(self, name=None):
self.name = name
if not name:
self.name = 'all'
self.one = _Motor('one')
self.two = _Motor('two')
def invert(self):
logging.debug(self.name + ' invert')
def forwards(self, speed=100):
logging.debug(self.name + ' forwards ' + str(speed))
def backwards(self, speed=100):
logging.debug(self.name + ' backwards ' + str(speed))
def speed(self, speed):
logging.debug(self.name + ' speed ' + str(speed))
def stop(self):
logging.debug(self.name + ' stop')
motor = _Motor()
|
<commit_before><commit_msg>Add explorer phat motor functions mock<commit_after># pylint: disable=W1201,C0103
import logging
class _Motor:
def __init__(self, name=None):
self.name = name
if not name:
self.name = 'all'
self.one = _Motor('one')
self.two = _Motor('two')
def invert(self):
logging.debug(self.name + ' invert')
def forwards(self, speed=100):
logging.debug(self.name + ' forwards ' + str(speed))
def backwards(self, speed=100):
logging.debug(self.name + ' backwards ' + str(speed))
def speed(self, speed):
logging.debug(self.name + ' speed ' + str(speed))
def stop(self):
logging.debug(self.name + ' stop')
motor = _Motor()
|
|
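The explorerhat record above shows a common embedded-development pattern: a mock that mirrors the hardware driver's API but only logs calls, so client code can run off-device. A minimal self-contained sketch of that pattern (class and call names invented for illustration):

import logging

logging.basicConfig(level=logging.DEBUG)

class LoggingMotor:
    """Stand-in that records calls instead of driving hardware."""
    def __init__(self, name='all'):
        self.name = name

    def forwards(self, speed=100):
        logging.debug('%s forwards %s', self.name, speed)

    def stop(self):
        logging.debug('%s stop', self.name)

motor = LoggingMotor()
motor.forwards(50)  # emits: DEBUG:root:all forwards 50
motor.stop()        # emits: DEBUG:root:all stop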
1aa64e9d29cd191e5790afca4852e96678d3845e
|
raiden/tests/integration/test_snapshotting.py
|
raiden/tests/integration/test_snapshotting.py
|
# -*- coding: utf-8 -*-
import pytest
from ethereum import slogging
from raiden.api.python import RaidenAPI
log = slogging.get_logger(__name__)
@pytest.mark.parametrize('number_of_nodes', [3])
@pytest.mark.parametrize('number_of_tokens', [1])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('settle_timeout', [16])
@pytest.mark.parametrize('reveal_timeout', [4])
@pytest.mark.parametrize('in_memory_database', [False])
def test_snapshotting(
raiden_network,
token_addresses,
settle_timeout,
blockchain_type):
app0, app1, app2 = raiden_network
api0 = RaidenAPI(app0.raiden)
api1 = RaidenAPI(app1.raiden)
api2 = RaidenAPI(app2.raiden)
channel_0_1 = api0.get_channel_list(token_addresses[0], app1.raiden.address)
channel_0_2 = api0.get_channel_list(token_addresses[0], app2.raiden.address)
with pytest.raises(KeyError):
channel_1_2 = api1.get_channel_list(token_addresses[0], app2.raiden.address)
assert len(channel_0_1) == 1
assert len(channel_0_2) == 1
api1.transfer_and_wait(token_addresses[0], 5, app2.raiden.address)
states = dict()
for num, app in enumerate(raiden_network):
states[num] = dict(
identifiers_to_statemanagers=app.raiden.identifier_to_statemanagers.copy(),
channelgraphs=app.raiden.channelgraphs.copy(),
)
app0.stop(leave_channels=False)
app1.stop(leave_channels=False)
app2.stop(leave_channels=False)
assert app0.raiden.transfer_states_path != app1.raiden.transfer_states_path
for num, app in enumerate(raiden_network):
app.raiden.restore_from_snapshots()
assert states[num]['identifiers_to_statemanagers'] == app.raiden.identifier_to_statemanagers
assert states[num]['channelgraphs'] == app.raiden.channelgraphs
# FIXME: testing the queue snapshot is missing
|
Add test for restart restore behavior
|
Add test for restart restore behavior
|
Python
|
mit
|
hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,hackaugusto/raiden
|
Add test for restart restore behavior
|
# -*- coding: utf-8 -*-
import pytest
from ethereum import slogging
from raiden.api.python import RaidenAPI
log = slogging.get_logger(__name__)
@pytest.mark.parametrize('number_of_nodes', [3])
@pytest.mark.parametrize('number_of_tokens', [1])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('settle_timeout', [16])
@pytest.mark.parametrize('reveal_timeout', [4])
@pytest.mark.parametrize('in_memory_database', [False])
def test_snapshotting(
raiden_network,
token_addresses,
settle_timeout,
blockchain_type):
app0, app1, app2 = raiden_network
api0 = RaidenAPI(app0.raiden)
api1 = RaidenAPI(app1.raiden)
api2 = RaidenAPI(app2.raiden)
channel_0_1 = api0.get_channel_list(token_addresses[0], app1.raiden.address)
channel_0_2 = api0.get_channel_list(token_addresses[0], app2.raiden.address)
with pytest.raises(KeyError):
channel_1_2 = api1.get_channel_list(token_addresses[0], app2.raiden.address)
assert len(channel_0_1) == 1
assert len(channel_0_2) == 1
api1.transfer_and_wait(token_addresses[0], 5, app2.raiden.address)
states = dict()
for num, app in enumerate(raiden_network):
states[num] = dict(
identifiers_to_statemanagers=app.raiden.identifier_to_statemanagers.copy(),
channelgraphs=app.raiden.channelgraphs.copy(),
)
app0.stop(leave_channels=False)
app1.stop(leave_channels=False)
app2.stop(leave_channels=False)
assert app0.raiden.transfer_states_path != app1.raiden.transfer_states_path
for num, app in enumerate(raiden_network):
app.raiden.restore_from_snapshots()
assert states[num]['identifiers_to_statemanagers'] == app.raiden.identifier_to_statemanagers
assert states[num]['channelgraphs'] == app.raiden.channelgraphs
# FIXME: testing the queue snapshot is missing
|
<commit_before><commit_msg>Add test for restart restore behavior<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from ethereum import slogging
from raiden.api.python import RaidenAPI
log = slogging.get_logger(__name__)
@pytest.mark.parametrize('number_of_nodes', [3])
@pytest.mark.parametrize('number_of_tokens', [1])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('settle_timeout', [16])
@pytest.mark.parametrize('reveal_timeout', [4])
@pytest.mark.parametrize('in_memory_database', [False])
def test_snapshotting(
raiden_network,
token_addresses,
settle_timeout,
blockchain_type):
app0, app1, app2 = raiden_network
api0 = RaidenAPI(app0.raiden)
api1 = RaidenAPI(app1.raiden)
api2 = RaidenAPI(app2.raiden)
channel_0_1 = api0.get_channel_list(token_addresses[0], app1.raiden.address)
channel_0_2 = api0.get_channel_list(token_addresses[0], app2.raiden.address)
with pytest.raises(KeyError):
channel_1_2 = api1.get_channel_list(token_addresses[0], app2.raiden.address)
assert len(channel_0_1) == 1
assert len(channel_0_2) == 1
api1.transfer_and_wait(token_addresses[0], 5, app2.raiden.address)
states = dict()
for num, app in enumerate(raiden_network):
states[num] = dict(
identifiers_to_statemanagers=app.raiden.identifier_to_statemanagers.copy(),
channelgraphs=app.raiden.channelgraphs.copy(),
)
app0.stop(leave_channels=False)
app1.stop(leave_channels=False)
app2.stop(leave_channels=False)
assert app0.raiden.transfer_states_path != app1.raiden.transfer_states_path
for num, app in enumerate(raiden_network):
app.raiden.restore_from_snapshots()
assert states[num]['identifiers_to_statemanagers'] == app.raiden.identifier_to_statemanagers
assert states[num]['channelgraphs'] == app.raiden.channelgraphs
# FIXME: testing the queue snapshot is missing
|
Add test for restart restore behavior# -*- coding: utf-8 -*-
import pytest
from ethereum import slogging
from raiden.api.python import RaidenAPI
log = slogging.get_logger(__name__)
@pytest.mark.parametrize('number_of_nodes', [3])
@pytest.mark.parametrize('number_of_tokens', [1])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('settle_timeout', [16])
@pytest.mark.parametrize('reveal_timeout', [4])
@pytest.mark.parametrize('in_memory_database', [False])
def test_snapshotting(
raiden_network,
token_addresses,
settle_timeout,
blockchain_type):
app0, app1, app2 = raiden_network
api0 = RaidenAPI(app0.raiden)
api1 = RaidenAPI(app1.raiden)
api2 = RaidenAPI(app2.raiden)
channel_0_1 = api0.get_channel_list(token_addresses[0], app1.raiden.address)
channel_0_2 = api0.get_channel_list(token_addresses[0], app2.raiden.address)
with pytest.raises(KeyError):
channel_1_2 = api1.get_channel_list(token_addresses[0], app2.raiden.address)
assert len(channel_0_1) == 1
assert len(channel_0_2) == 1
api1.transfer_and_wait(token_addresses[0], 5, app2.raiden.address)
states = dict()
for num, app in enumerate(raiden_network):
states[num] = dict(
identifiers_to_statemanagers=app.raiden.identifier_to_statemanagers.copy(),
channelgraphs=app.raiden.channelgraphs.copy(),
)
app0.stop(leave_channels=False)
app1.stop(leave_channels=False)
app2.stop(leave_channels=False)
assert app0.raiden.transfer_states_path != app1.raiden.transfer_states_path
for num, app in enumerate(raiden_network):
app.raiden.restore_from_snapshots()
assert states[num]['identifiers_to_statemanagers'] == app.raiden.identifier_to_statemanagers
assert states[num]['channelgraphs'] == app.raiden.channelgraphs
# FIXME: testing the queue snapshot is missing
|
<commit_before><commit_msg>Add test for restart restore behavior<commit_after># -*- coding: utf-8 -*-
import pytest
from ethereum import slogging
from raiden.api.python import RaidenAPI
log = slogging.get_logger(__name__)
@pytest.mark.parametrize('number_of_nodes', [3])
@pytest.mark.parametrize('number_of_tokens', [1])
@pytest.mark.parametrize('channels_per_node', [1])
@pytest.mark.parametrize('settle_timeout', [16])
@pytest.mark.parametrize('reveal_timeout', [4])
@pytest.mark.parametrize('in_memory_database', [False])
def test_snapshotting(
raiden_network,
token_addresses,
settle_timeout,
blockchain_type):
app0, app1, app2 = raiden_network
api0 = RaidenAPI(app0.raiden)
api1 = RaidenAPI(app1.raiden)
api2 = RaidenAPI(app2.raiden)
channel_0_1 = api0.get_channel_list(token_addresses[0], app1.raiden.address)
channel_0_2 = api0.get_channel_list(token_addresses[0], app2.raiden.address)
with pytest.raises(KeyError):
channel_1_2 = api1.get_channel_list(token_addresses[0], app2.raiden.address)
assert len(channel_0_1) == 1
assert len(channel_0_2) == 1
api1.transfer_and_wait(token_addresses[0], 5, app2.raiden.address)
states = dict()
for num, app in enumerate(raiden_network):
states[num] = dict(
identifiers_to_statemanagers=app.raiden.identifier_to_statemanagers.copy(),
channelgraphs=app.raiden.channelgraphs.copy(),
)
app0.stop(leave_channels=False)
app1.stop(leave_channels=False)
app2.stop(leave_channels=False)
assert app0.raiden.transfer_states_path != app1.raiden.transfer_states_path
for num, app in enumerate(raiden_network):
app.raiden.restore_from_snapshots()
assert states[num]['identifiers_to_statemanagers'] == app.raiden.identifier_to_statemanagers
assert states[num]['channelgraphs'] == app.raiden.channelgraphs
# FIXME: testing the queue snapshot is missing
|
|
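The Raiden test above exercises a snapshot/restore round trip: capture state, stop the apps, restore from snapshots, then compare against the captured copies. Stripped of the Raiden machinery, the invariant being checked looks like this (toy class, every name invented):

import copy

class TinyNode:
    def __init__(self):
        self.state = {'transfers': []}
        self._snapshot = None

    def snapshot(self):
        self._snapshot = copy.deepcopy(self.state)

    def restore_from_snapshot(self):
        self.state = copy.deepcopy(self._snapshot)

node = TinyNode()
node.state['transfers'].append(('token0', 5))
node.snapshot()
node.state['transfers'].clear()   # simulate a restart losing in-memory state
node.restore_from_snapshot()
assert node.state['transfers'] == [('token0', 5)]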
c56b012547a17bd61e67fa0c14444dd31c9a0b44
|
scripts/set_pokemon_order.py
|
scripts/set_pokemon_order.py
|
#!/usr/bin/env python2
import sqlite3
conn = sqlite3.connect("pokedex/data/pokedex.sqlite")
cur = conn.execute(
"""select p.id, p.name, pf.name
from pokemon p
join evolution_chains ec on p.evolution_chain_id = ec.id
left join pokemon_forms pf on p.id = pf.unique_pokemon_id
order by ec.id, is_baby = 0, coalesce(pf.form_base_pokemon_id, p.id),
pf."order", pf.name
;""")
idmap = []
for i, row in enumerate(cur):
idmap.append((1 + i, row[0]))
conn.executemany(
"""update pokemon set "order" = ? where id = ?""",
idmap,
)
conn.commit()
|
Add a script for setting pokemon.order
|
Add a script for setting pokemon.order
|
Python
|
mit
|
veekun/pokedex,RK905/pokedex-1,veekun/pokedex,xfix/pokedex,DaMouse404/pokedex,mschex1/pokedex
|
Add a script for setting pokemon.order
|
#!/usr/bin/env python2
import sqlite3
conn = sqlite3.connect("pokedex/data/pokedex.sqlite")
cur = conn.execute(
"""select p.id, p.name, pf.name
from pokemon p
join evolution_chains ec on p.evolution_chain_id = ec.id
left join pokemon_forms pf on p.id = pf.unique_pokemon_id
order by ec.id, is_baby = 0, coalesce(pf.form_base_pokemon_id, p.id),
pf."order", pf.name
;""")
idmap = []
for i, row in enumerate(cur):
idmap.append((1 + i, row[0]))
conn.executemany(
"""update pokemon set "order" = ? where id = ?""",
idmap,
)
conn.commit()
|
<commit_before><commit_msg>Add a script for setting pokemon.order<commit_after>
|
#!/usr/bin/env python2
import sqlite3
conn = sqlite3.connect("pokedex/data/pokedex.sqlite")
cur = conn.execute(
"""select p.id, p.name, pf.name
from pokemon p
join evolution_chains ec on p.evolution_chain_id = ec.id
left join pokemon_forms pf on p.id = pf.unique_pokemon_id
order by ec.id, is_baby = 0, coalesce(pf.form_base_pokemon_id, p.id),
pf."order", pf.name
;""")
idmap = []
for i, row in enumerate(cur):
idmap.append((1 + i, row[0]))
conn.executemany(
"""update pokemon set "order" = ? where id = ?""",
idmap,
)
conn.commit()
|
Add a script for setting pokemon.order#!/usr/bin/env python2
import sqlite3
conn = sqlite3.connect("pokedex/data/pokedex.sqlite")
cur = conn.execute(
"""select p.id, p.name, pf.name
from pokemon p
join evolution_chains ec on p.evolution_chain_id = ec.id
left join pokemon_forms pf on p.id = pf.unique_pokemon_id
order by ec.id, is_baby = 0, coalesce(pf.form_base_pokemon_id, p.id),
pf."order", pf.name
;""")
idmap = []
for i, row in enumerate(cur):
idmap.append((1 + i, row[0]))
conn.executemany(
"""update pokemon set "order" = ? where id = ?""",
idmap,
)
conn.commit()
|
<commit_before><commit_msg>Add a script for setting pokemon.order<commit_after>#!/usr/bin/env python2
import sqlite3
conn = sqlite3.connect("pokedex/data/pokedex.sqlite")
cur = conn.execute(
"""select p.id, p.name, pf.name
from pokemon p
join evolution_chains ec on p.evolution_chain_id = ec.id
left join pokemon_forms pf on p.id = pf.unique_pokemon_id
order by ec.id, is_baby = 0, coalesce(pf.form_base_pokemon_id, p.id),
pf."order", pf.name
;""")
idmap = []
for i, row in enumerate(cur):
idmap.append((1 + i, row[0]))
conn.executemany(
"""update pokemon set "order" = ? where id = ?""",
idmap,
)
conn.commit()
|
|
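The pokemon.order script above follows a standard SQLite idiom: SELECT ids in the desired order, enumerate them to get a rank, then write the rank back with executemany. A self-contained toy version of the same idiom (schema and rows invented; note the quoted "order" column, since ORDER is an SQL keyword):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE pokemon (id INTEGER PRIMARY KEY, name TEXT, "order" INTEGER)')
conn.executemany('INSERT INTO pokemon (id, name) VALUES (?, ?)',
                 [(4, 'charmander'), (1, 'bulbasaur'), (7, 'squirtle')])
cur = conn.execute('SELECT id FROM pokemon ORDER BY name')
idmap = [(rank, row[0]) for rank, row in enumerate(cur, start=1)]
conn.executemany('UPDATE pokemon SET "order" = ? WHERE id = ?', idmap)
conn.commit()
names = [r[0] for r in conn.execute('SELECT name FROM pokemon ORDER BY "order"')]
assert names == ['bulbasaur', 'charmander', 'squirtle']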
93fc93dd99af3ae2595096babc60291299826f4b
|
ALE/build_gene_fams_from_eggnogmapper_output.py
|
ALE/build_gene_fams_from_eggnogmapper_output.py
|
import os, re, sys
fams = {}
#read in the output of EggNOG mapper, then make protein families out of sequences which map to the same set of COG/KOG functional categories
inh = open("Para_sacc.fasta.emapper.annotations")
for line in inh:
fields = re.split("\t", line.rstrip())
#print fields[-4]
cogs = re.split(",", fields[-4])
for fam_id in cogs:
bits = re.split("@", fam_id)
if bits[0] in fams:
fams[bits[0]].add(fields[0])
else:
fams[bits[0]] = {fields[0]}
all_keys = fams.keys()
setlist = []
#now loop over all of the COG/KOG/whatever IDs in the fam dictionary, and merge their sets if the sets have any common elements (sequences)
for key in fams.keys():
setlist.append(fams[key])
merged = True
while merged:
merged = False
results = []
while setlist:
common, rest = setlist[0], setlist[1:]
setlist = []
for x in rest:
if x.isdisjoint(common):
setlist.append(x)
else:
merged = True
common |= x
results.append(common)
setlist = results
groupnum = 0 #arbitrary numbering for the gene families
for element in setlist:
groupnum += 1
for seq in element:
print str(groupnum) + "\t" + seq
|
Create gene families based on eggNOG 5 annotation
|
Create gene families based on eggNOG 5 annotation
|
Python
|
mit
|
Tancata/phylo,Tancata/phylo
|
Create gene families based on eggNOG 5 annotation
|
import os, re, sys
fams = {}
#read in the output of EggNOG mapper, then make protein families out of sequences which map to the same set of COG/KOG functional categories
inh = open("Para_sacc.fasta.emapper.annotations")
for line in inh:
fields = re.split("\t", line.rstrip())
#print fields[-4]
cogs = re.split(",", fields[-4])
for fam_id in cogs:
bits = re.split("@", fam_id)
if bits[0] in fams:
fams[bits[0]].add(fields[0])
else:
fams[bits[0]] = {fields[0]}
all_keys = fams.keys()
setlist = []
#now loop over all of the COG/KOG/whatever IDs in the fam dictionary, and merge their sets if the sets have any common elements (sequences)
for key in fams.keys():
setlist.append(fams[key])
merged = True
while merged:
merged = False
results = []
while setlist:
common, rest = setlist[0], setlist[1:]
setlist = []
for x in rest:
if x.isdisjoint(common):
setlist.append(x)
else:
merged = True
common |= x
results.append(common)
setlist = results
groupnum = 0 #arbitrary numbering for the gene families
for element in setlist:
groupnum += 1
for seq in element:
print str(groupnum) + "\t" + seq
|
<commit_before><commit_msg>Create gene families based on eggNOG 5 annotation<commit_after>
|
import os, re, sys
fams = {}
#read in the output of EggNOG mapper, then make protein families out of sequences which map to the same set of COG/KOG functional categories
inh = open("Para_sacc.fasta.emapper.annotations")
for line in inh:
fields = re.split("\t", line.rstrip())
#print fields[-4]
cogs = re.split(",", fields[-4])
for fam_id in cogs:
bits = re.split("@", fam_id)
if bits[0] in fams:
fams[bits[0]].add(fields[0])
else:
fams[bits[0]] = {fields[0]}
all_keys = fams.keys()
setlist = []
#now loop over all of the COG/KOG/whatever IDs in the fam dictionary, and merge their sets if the sets have any common elements (sequences)
for key in fams.keys():
setlist.append(fams[key])
merged = True
while merged:
merged = False
results = []
while setlist:
common, rest = setlist[0], setlist[1:]
setlist = []
for x in rest:
if x.isdisjoint(common):
setlist.append(x)
else:
merged = True
common |= x
results.append(common)
setlist = results
groupnum = 0 #arbitrary numbering for the gene families
for element in setlist:
groupnum += 1
for seq in element:
print str(groupnum) + "\t" + seq
|
Create gene families based on eggNOG 5 annotationimport os, re, sys
fams = {}
#read in the output of EggNOG mapper, then make protein families out of sequences which map to the same set of COG/KOG functional categories
inh = open("Para_sacc.fasta.emapper.annotations")
for line in inh:
fields = re.split("\t", line.rstrip())
#print fields[-4]
cogs = re.split(",", fields[-4])
for fam_id in cogs:
bits = re.split("@", fam_id)
if bits[0] in fams:
fams[bits[0]].add(fields[0])
else:
fams[bits[0]] = {fields[0]}
all_keys = fams.keys()
setlist = []
#now loop over all of the COG/KOG/whatever IDs in the fam dictionary, and merge their sets if the sets have any common elements (sequences)
for key in fams.keys():
setlist.append(fams[key])
merged = True
while merged:
merged = False
results = []
while setlist:
common, rest = setlist[0], setlist[1:]
setlist = []
for x in rest:
if x.isdisjoint(common):
setlist.append(x)
else:
merged = True
common |= x
results.append(common)
setlist = results
groupnum = 0 #arbitrary numbering for the gene families
for element in setlist:
groupnum += 1
for seq in element:
print str(groupnum) + "\t" + seq
|
<commit_before><commit_msg>Create gene families based on eggNOG 5 annotation<commit_after>import os, re, sys
fams = {}
#read in the output of EggNOG mapper, then make protein families out of sequences which map to the same set of COG/KOG functional categories
inh = open("Para_sacc.fasta.emapper.annotations")
for line in inh:
fields = re.split("\t", line.rstrip())
#print fields[-4]
cogs = re.split(",", fields[-4])
for fam_id in cogs:
bits = re.split("@", fam_id)
if bits[0] in fams:
fams[bits[0]].add(fields[0])
else:
fams[bits[0]] = {fields[0]}
all_keys = fams.keys()
setlist = []
#now loop over all of the COG/KOG/whatever IDs in the fam dictionary, and merge their sets if the sets have any common elements (sequences)
for key in fams.keys():
setlist.append(fams[key])
merged = True
while merged:
merged = False
results = []
while setlist:
common, rest = setlist[0], setlist[1:]
setlist = []
for x in rest:
if x.isdisjoint(common):
setlist.append(x)
else:
merged = True
common |= x
results.append(common)
setlist = results
groupnum = 0 #arbitrary numbering for the gene families
for element in setlist:
groupnum += 1
for seq in element:
print str(groupnum) + "\t" + seq
|
|
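The while-loop in the eggNOG script above is a fixed-point merge of overlapping sets, in effect connected components where families sharing any sequence get unioned. The same merge semantics as a standalone, testable function (extracted for illustration, not part of the commit):

def merge_overlapping(sets):
    sets = [set(s) for s in sets]
    merged = True
    while merged:
        merged, results = False, []
        while sets:
            common, rest = sets[0], sets[1:]
            sets = []
            for s in rest:
                if s.isdisjoint(common):
                    sets.append(s)
                else:
                    merged = True
                    common |= s
            results.append(common)
        sets = results
    return sets

assert sorted(map(sorted, merge_overlapping([{1, 2}, {2, 3}, {9}]))) == [[1, 2, 3], [9]]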
add67e63dd30a633bcd89db5c7908a6c9eefc059
|
qtpy/tests/test_qdesktopservice_split.py
|
qtpy/tests/test_qdesktopservice_split.py
|
from __future__ import absolute_import
import pytest
from qtpy.QtCore import QStandardPaths
from qtpy.QtGui import QDesktopServices
"""Test QDesktopServices split in Qt5."""
def test_qstandarpath():
"""Test the qtpy.QStandardPaths namespace"""
assert QStandardPaths.StandardLocation is not None
# Attributes from QDesktopServices shouldn't be in QStandardPaths
with pytest.raises(AttributeError) as excinfo:
QStandardPaths.setUrlHandler
def test_qdesktopservice():
"""Test the qtpy.QDesktopServices namespace"""
assert QDesktopServices.setUrlHandler is not None
# Attributes from QStandardPaths shouldn't be in QDesktopServices
with pytest.raises(AttributeError) as excinfo:
QDesktopServices.StandardLocation
|
Add test for QDesktopServices split.
|
Add test for QDesktopServices split.
|
Python
|
mit
|
goanpeca/qtpy,davvid/qtpy,davvid/qtpy,spyder-ide/qtpy,goanpeca/qtpy
|
Add test for QDesktopServices split.
|
from __future__ import absolute_import
import pytest
from qtpy.QtCore import QStandardPaths
from qtpy.QtGui import QDesktopServices
"""Test QDesktopServices split in Qt5."""
def test_qstandarpath():
"""Test the qtpy.QStandardPaths namespace"""
assert QStandardPaths.StandardLocation is not None
# Attributes from QDesktopServices shouldn't be in QStandardPaths
with pytest.raises(AttributeError) as excinfo:
QStandardPaths.setUrlHandler
def test_qdesktopservice():
"""Test the qtpy.QDesktopServices namespace"""
assert QDesktopServices.setUrlHandler is not None
# Attributes from QStandardPaths shouldn't be in QDesktopServices
with pytest.raises(AttributeError) as excinfo:
QDesktopServices.StandardLocation
|
<commit_before><commit_msg>Add test for QDesktopServices split.<commit_after>
|
from __future__ import absolute_import
import pytest
from qtpy.QtCore import QStandardPaths
from qtpy.QtGui import QDesktopServices
"""Test QDesktopServices split in Qt5."""
def test_qstandarpath():
"""Test the qtpy.QStandardPaths namespace"""
assert QStandardPaths.StandardLocation is not None
# Attributes from QDesktopServices shouldn't be in QStandardPaths
with pytest.raises(AttributeError) as excinfo:
QStandardPaths.setUrlHandler
def test_qdesktopservice():
"""Test the qtpy.QDesktopServices namespace"""
assert QDesktopServices.setUrlHandler is not None
# Attributes from QStandardPaths shouldn't be in QDesktopServices
with pytest.raises(AttributeError) as excinfo:
QDesktopServices.StandardLocation
|
Add test for QDesktopServices split.from __future__ import absolute_import
import pytest
from qtpy.QtCore import QStandardPaths
from qtpy.QtGui import QDesktopServices
"""Test QDesktopServices split in Qt5."""
def test_qstandarpath():
"""Test the qtpy.QStandardPaths namespace"""
assert QStandardPaths.StandardLocation is not None
# Attributes from QDesktopServices shouldn't be in QStandardPaths
with pytest.raises(AttributeError) as excinfo:
QStandardPaths.setUrlHandler
def test_qdesktopservice():
"""Test the qtpy.QDesktopServices namespace"""
assert QDesktopServices.setUrlHandler is not None
# Attributes from QStandardPaths shouldn't be in QDesktopServices
with pytest.raises(AttributeError) as excinfo:
QDesktopServices.StandardLocation
|
<commit_before><commit_msg>Add test for QDesktopServices split.<commit_after>from __future__ import absolute_import
import pytest
from qtpy.QtCore import QStandardPaths
from qtpy.QtGui import QDesktopServices
"""Test QDesktopServices split in Qt5."""
def test_qstandarpath():
"""Test the qtpy.QStandardPaths namespace"""
assert QStandardPaths.StandardLocation is not None
# Attributes from QDesktopServices shouldn't be in QStandardPaths
with pytest.raises(AttributeError) as excinfo:
QStandardPaths.setUrlHandler
def test_qdesktopservice():
"""Test the qtpy.QDesktopServices namespace"""
assert QDesktopServices.setUrlHandler is not None
# Attributes from QStandardPaths shouldn't be in QDesktopServices
with pytest.raises(AttributeError) as excinfo:
QDesktopServices.StandardLocation
|
|
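The qtpy test above pins down an API split: each name must be reachable in exactly one namespace, and reaching for it in the other must raise AttributeError. The bare-bones shape of that assertion style (the Namespace class and its attributes are invented stand-ins):

import pytest

class Namespace:
    kept_name = object()

assert Namespace.kept_name is not None
with pytest.raises(AttributeError):
    Namespace.moved_name  # lives in the other namespace after the split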
919f28ee01aeea4dc25f5c4dbb344ac6d1831526
|
src/utils/xdsinp_to_dict.py
|
src/utils/xdsinp_to_dict.py
|
#!/usr/bin/env python
import json
import sys
class INP2DICT(object):
def __init__(self, xdsinp, xdsdict):
self.xdsinp = xdsinp
self.xdsdict = xdsdict
self.run()
def run(self):
temp = open(self.xdsinp, 'r').readlines()
inp=[]
for line in temp:
inp.append(line.strip())
temp=[]
untrusted_index=1
xds_dict = {}
# A list of experiment dependent keywords that should not make up a
# minimal description of an XDS.INP file or values that you would
# want to possibly change based on conditions.
exp_deps = ['ORGX', 'ORGY', 'DETECTOR_DISTANCE', 'OSCILLATION_RANGE', 'X-RAY_WAVELENGTH',
'NAME_TEMPLATE_OF_DATA_FRAMES', 'MAXIMUM_NUMBER_OF_PROCESSORS',
'MAXIMUM_NUMBER_OF_JOBS', 'JOB', 'UNIT_CELL_CONSTANTS', 'SPACEGROUP',
'REFERENCE_DATA_SET', 'FIT_B-FACTOR_TO_REFERENCE_DATA_SET', 'EXCLUDE_DATA_RANGE',
'DATA_RANGE', 'SPOT_RANGE', "FRIEDEL'S_LAW", 'BACKGROUND_RANGE',
'EXCLUDE_RESOLUTION_RANGE', 'NUMBER_OF_IMAGES_IN_CACHE']
for line in inp:
# Skip if line begins with "!" or has a length too short to contain
# an XDS keyword.
if line.startswith('!') or len(line) < 4:
pass
else:
# If the line contains a comment signaled by an "!", ignore that section.
keyline = line.split('!')[0].strip()
# If the line contains only a single keyword, value pair -
# add that pair to the dict.
# EXCEPTION: if keyword contains 'UNTRUSTED', add an index to it
# before adding it to the dict.
# EXCEPTION: If keyword is part of exp_deps list, don't add to the dict.
for i in range (0, keyline.count('=')):
keyline, sep, value = keyline.rpartition('=')
splitkey = keyline.split(' ')
keyline = ' '.join(splitkey[0:-1])
key = splitkey[-1]
if 'UNTRUSTED' in key:
key = '%s%s' %(key,untrusted_index)
untrusted_index+=1
if key not in exp_deps:
xds_dict[key] = value
with open(self.xdsdict,'w') as file:
json.dump(xds_dict,file)
return()
if __name__ == '__main__':
number_of_args = len(sys.argv) - 1
if number_of_args >= 2:
INP2DICT(xdsinp=sys.argv[1],xdsdict=sys.argv[2])
else:
INP2DICT(xdsinp=sys.argv[1], xdsdict='XDSDICT.json')
|
Add DN's tool for XDS.INP parsing
|
Add DN's tool for XDS.INP parsing
|
Python
|
agpl-3.0
|
RAPD/RAPD,RAPD/RAPD,RAPD/RAPD,RAPD/RAPD,RAPD/RAPD
|
Add DN's tool for XDS.INP parsing
|
#!/usr/bin/env python
import json
import sys
class INP2DICT(object):
def __init__(self, xdsinp, xdsdict):
self.xdsinp = xdsinp
self.xdsdict = xdsdict
self.run()
def run(self):
temp = open(self.xdsinp, 'r').readlines()
inp=[]
for line in temp:
inp.append(line.strip())
temp=[]
untrusted_index=1
xds_dict = {}
# A list of experiment dependent keywords that should not make up a
# minimal description of an XDS.INP file or values that you would
# want to possibly change based on conditions.
exp_deps = ['ORGX', 'ORGY', 'DETECTOR_DISTANCE', 'OSCILLATION_RANGE', 'X-RAY_WAVELENGTH',
'NAME_TEMPLATE_OF_DATA_FRAMES', 'MAXIMUM_NUMBER_OF_PROCESSORS',
'MAXIMUM_NUMBER_OF_JOBS', 'JOB', 'UNIT_CELL_CONSTANTS', 'SPACEGROUP',
'REFERENCE_DATA_SET', 'FIT_B-FACTOR_TO_REFERENCE_DATA_SET', 'EXCLUDE_DATA_RANGE',
'DATA_RANGE', 'SPOT_RANGE', "FRIEDEL'S_LAW", 'BACKGROUND_RANGE',
'EXCLUDE_RESOLUTION_RANGE', 'NUMBER_OF_IMAGES_IN_CACHE']
for line in inp:
# Skip if line begins with "!" or has a length too short to contain
# an XDS keyword.
if line.startswith('!') or len(line) < 4:
pass
else:
# If the line contains a comment signaled by an "!", ignore that section.
keyline = line.split('!')[0].strip()
# If the line contains only a single keyword, value pair -
# add that pair to the dict.
# EXCEPTION: if keyword contains 'UNTRUSTED', add an index to it
# before adding it to the dict.
# EXCEPTION: If keyword is part of exp_deps list, don't add to the dict.
for i in range (0, keyline.count('=')):
keyline, sep, value = keyline.rpartition('=')
splitkey = keyline.split(' ')
keyline = ' '.join(splitkey[0:-1])
key = splitkey[-1]
if 'UNTRUSTED' in key:
key = '%s%s' %(key,untrusted_index)
untrusted_index+=1
if key not in exp_deps:
xds_dict[key] = value
with open(self.xdsdict,'w') as file:
json.dump(xds_dict,file)
return()
if __name__ == '__main__':
number_of_args = len(sys.argv) - 1
if number_of_args >= 2:
INP2DICT(xdsinp=sys.argv[1],xdsdict=sys.argv[2])
else:
INP2DICT(xdsinp=sys.argv[1], xdsdict='XDSDICT.json')
|
<commit_before><commit_msg>Add DN's tool for XDS.INP parsing<commit_after>
|
#!/usr/bin/env python
import json
import sys
class INP2DICT(object):
def __init__(self, xdsinp, xdsdict):
self.xdsinp = xdsinp
self.xdsdict = xdsdict
self.run()
def run(self):
temp = open(self.xdsinp, 'r').readlines()
inp=[]
for line in temp:
inp.append(line.strip())
temp=[]
untrusted_index=1
xds_dict = {}
# A list of experiment dependent keywords that should not make up a
# minimal description of an XDS.INP file or values that you would
# want to possibly change based on conditions.
exp_deps = ['ORGX', 'ORGY', 'DETECTOR_DISTANCE', 'OSCILLATION_RANGE', 'X-RAY_WAVELENGTH',
'NAME_TEMPLATE_OF_DATA_FRAMES', 'MAXIMUM_NUMBER_OF_PROCESSORS',
'MAXIMUM_NUMBER_OF_JOBS', 'JOB', 'UNIT_CELL_CONSTANTS', 'SPACEGROUP',
'REFERENCE_DATA_SET', 'FIT_B-FACTOR_TO_REFERENCE_DATA_SET', 'EXCLUDE_DATA_RANGE',
'DATA_RANGE', 'SPOT_RANGE', "FRIEDEL'S_LAW", 'BACKGROUND_RANGE',
'EXCLUDE_RESOLUTION_RANGE', 'NUMBER_OF_IMAGES_IN_CACHE']
for line in inp:
# Skip if line begins with "!" or has a length too short to contain
# an XDS keyword.
if line.startswith('!') or len(line) < 4:
pass
else:
# If the line contains a comment signaled by an "!", ignore that section.
keyline = line.split('!')[0].strip()
# If the line contains only a single keyword, value pair -
# add that pair to the dict.
# EXCEPTION: if keyword contains 'UNTRUSTED', add an index to it
# before adding it to the dict.
# EXCEPTION: If keyword is part of exp_deps list, don't add to the dict.
for i in range (0, keyline.count('=')):
keyline, sep, value = keyline.rpartition('=')
splitkey = keyline.split(' ')
keyline = ' '.join(splitkey[0:-1])
key = splitkey[-1]
if 'UNTRUSTED' in key:
key = '%s%s' %(key,untrusted_index)
untrusted_index+=1
if key not in exp_deps:
xds_dict[key] = value
with open(self.xdsdict,'w') as file:
json.dump(xds_dict,file)
return()
if __name__ == '__main__':
number_of_args = len(sys.argv) - 1
if number_of_args >= 2:
INP2DICT(xdsinp=sys.argv[1],xdsdict=sys.argv[2])
else:
INP2DICT(xdsinp=sys.argv[1], xdsdict='XDSDICT.json')
|
Add DN's tool for XDS.INP parsing#!/usr/bin/env python
import json
import sys
class INP2DICT(object):
def __init__(self, xdsinp, xdsdict):
self.xdsinp = xdsinp
self.xdsdict = xdsdict
self.run()
def run(self):
temp = open(self.xdsinp, 'r').readlines()
inp=[]
for line in temp:
inp.append(line.strip())
temp=[]
untrusted_index=1
xds_dict = {}
# A list of experiment dependent keywords that should not make up a
# minimal description of an XDS.INP file or values that you would
# want to possibly change based on conditions.
exp_deps = ['ORGX', 'ORGY', 'DETECTOR_DISTANCE', 'OSCILLATION_RANGE', 'X-RAY_WAVELENGTH',
'NAME_TEMPLATE_OF_DATA_FRAMES', 'MAXIMUM_NUMBER_OF_PROCESSORS',
'MAXIMUM_NUMBER_OF_JOBS', 'JOB', 'UNIT_CELL_CONSTANTS', 'SPACEGROUP',
'REFERENCE_DATA_SET', 'FIT_B-FACTOR_TO_REFERENCE_DATA_SET', 'EXCLUDE_DATA_RANGE',
'DATA_RANGE', 'SPOT_RANGE', "FRIEDEL'S_LAW", 'BACKGROUND_RANGE',
'EXCLUDE_RESOLUTION_RANGE', 'NUMBER_OF_IMAGES_IN_CACHE']
for line in inp:
# Skip if line begins with "!" or has a length too short to contain
# an XDS keyword.
if line.startswith('!') or len(line) < 4:
pass
else:
# If the line contains a comment signaled by an "!", ignore that section.
keyline = line.split('!')[0].strip()
# If the line contains only a single keyword, value pair -
# add that pair to the dict.
# EXCEPTION: if keyword contains 'UNTRUSTED', add an index to it
# before adding it to the dict.
# EXCEPTION: If keyword is part of exp_deps list, don't add to the dict.
for i in range (0, keyline.count('=')):
keyline, sep, value = keyline.rpartition('=')
splitkey = keyline.split(' ')
keyline = ' '.join(splitkey[0:-1])
key = splitkey[-1]
if 'UNTRUSTED' in key:
key = '%s%s' %(key,untrusted_index)
untrusted_index+=1
if key not in exp_deps:
xds_dict[key] = value
with open(self.xdsdict,'w') as file:
json.dump(xds_dict,file)
return()
if __name__ == '__main__':
number_of_args = len(sys.argv) - 1
if number_of_args >= 2:
INP2DICT(xdsinp=sys.argv[1],xdsdict=sys.argv[2])
else:
INP2DICT(xdsinp=sys.argv[1], xdsdict='XDSDICT.json')
|
<commit_before><commit_msg>Add DN's tool for XDS.INP parsing<commit_after>#!/usr/bin/env python
import json
import sys
class INP2DICT(object):
def __init__(self, xdsinp, xdsdict):
self.xdsinp = xdsinp
self.xdsdict = xdsdict
self.run()
def run(self):
temp = open(self.xdsinp, 'r').readlines()
inp=[]
for line in temp:
inp.append(line.strip())
temp=[]
untrusted_index=1
xds_dict = {}
# A list of experiment dependent keywords that should not make up a
# minimal description of an XDS.INP file or values that you would
# want to possibly change based on conditions.
exp_deps = ['ORGX', 'ORGY', 'DETECTOR_DISTANCE', 'OSCILLATION_RANGE', 'X-RAY_WAVELENGTH',
'NAME_TEMPLATE_OF_DATA_FRAMES', 'MAXIMUM_NUMBER_OF_PROCESSORS',
'MAXIMUM_NUMBER_OF_JOBS', 'JOB', 'UNIT_CELL_CONSTANTS', 'SPACEGROUP',
'REFERENCE_DATA_SET', 'FIT_B-FACTOR_TO_REFERENCE_DATA_SET', 'EXCLUDE_DATA_RANGE',
'DATA_RANGE', 'SPOT_RANGE', "FRIEDEL'S_LAW", 'BACKGROUND_RANGE',
'EXCLUDE_RESOLUTION_RANGE', 'NUMBER_OF_IMAGES_IN_CACHE']
for line in inp:
# Skip if line begins with "!" or has a length too short to contain
# an XDS keyword.
if line.startswith('!') or len(line) < 4:
pass
else:
# If the line contains a comment signaled by an "!", ignore that section.
keyline = line.split('!')[0].strip()
# If the line contains only a single keyword, value pair -
# add that pair to the dict.
# EXCEPTION: if keyword contains 'UNTRUSTED', add an index to it
# before adding it to the dict.
# EXCEPTION: If keyword is part of exp_deps list, don't add to the dict.
for i in range (0, keyline.count('=')):
keyline, sep, value = keyline.rpartition('=')
splitkey = keyline.split(' ')
keyline = ' '.join(splitkey[0:-1])
key = splitkey[-1]
if 'UNTRUSTED' in key:
key = '%s%s' %(key,untrusted_index)
untrusted_index+=1
if key not in exp_deps:
xds_dict[key] = value
with open(self.xdsdict,'w') as file:
json.dump(xds_dict,file)
return()
if __name__ == '__main__':
number_of_args = len(sys.argv) - 1
if number_of_args >= 2:
INP2DICT(xdsinp=sys.argv[1],xdsdict=sys.argv[2])
else:
INP2DICT(xdsinp=sys.argv[1], xdsdict='XDSDICT.json')
|
|
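The INP2DICT parser above handles several KEY= VALUE pairs on one line by peeling them off from the right with rpartition('='). In isolation the trick looks like this (the sample line is invented, not taken from a real XDS.INP):

line = 'ORGX= 1535.9 ORGY= 1541.7'
pairs = {}
keyline = line
for _ in range(line.count('=')):
    keyline, _sep, value = keyline.rpartition('=')
    parts = keyline.split(' ')
    keyline, key = ' '.join(parts[:-1]), parts[-1]
    pairs[key] = value.strip()
assert pairs == {'ORGX': '1535.9', 'ORGY': '1541.7'}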
b272193ed589edf301bf97f9896fc819f6c142c3
|
library/migrations/0013_upgrade_to_fts5.py
|
library/migrations/0013_upgrade_to_fts5.py
|
# Generated by Django 2.1.11 on 2019-08-07 21:43
from django.db import migrations
FORWARDS = [
"""DROP TABLE piece_search""",
"""
CREATE VIRTUAL TABLE piece_search
USING fts5 (id unindexed, body, tokenize = 'porter unicode61')
""",
"""
CREATE TRIGGER library_piece_search_insert
AFTER INSERT ON library_piece
FOR EACH ROW
BEGIN
INSERT INTO piece_search (id, body)
VALUES (NEW.id, NEW.title || ' ' || NEW.subtitle || ' ' || NEW.comment);
END
""",
"""
INSERT INTO piece_search
(id, body)
SELECT piece.id AS id,
piece.title || ' ' ||
piece.subtitle || ' ' ||
piece.comment || ' ' ||
COALESCE(GROUP_CONCAT(composer.given_names || ' ' || composer.surname), '') || ' ' ||
COALESCE(GROUP_CONCAT(arranger.given_names || ' ' || arranger.surname), '') AS body
FROM library_piece AS piece
LEFT JOIN library_piece_composer_artists AS pc ON piece.id = pc.piece_id
LEFT JOIN library_piece_arranger_artists AS pa ON piece.id = pa.piece_id
LEFT JOIN library_artist AS composer ON pc.artist_id = composer.id
LEFT JOIN library_artist AS arranger ON pa.artist_id = arranger.id
GROUP BY piece.id
""",
]
class Migration(migrations.Migration):
dependencies = [
('library', '0012_fix_fts_delete'),
]
operations = [
]
|
Update sqlite fts to fts5
|
[WIP] Update sqlite fts to fts5
I think this will provide better stemming, e.g. searching for "star" will match
"Stars and Stripes Forever", which it currently doesn't.
|
Python
|
agpl-3.0
|
cellofellow/symphony,cellofellow/symphony,cellofellow/symphony
|
[WIP] Update sqlite fts to fts5
I think this will provide better stemming, e.g. searching for "star" will match
"Stars and Stripes Forever", which it currently doesn't.
|
# Generated by Django 2.1.11 on 2019-08-07 21:43
from django.db import migrations
FORWARDS = [
"""DROP TABLE piece_search""",
"""
CREATE VIRTUAL TABLE piece_search
USING fts5 (id unindexed, body, tokenize = 'porter unicode61')
""",
"""
CREATE TRIGGER library_piece_search_insert
AFTER INSERT ON library_piece
FOR EACH ROW
BEGIN
INSERT INTO piece_search (id, body)
VALUES (NEW.id, NEW.title || ' ' || NEW.subtitle || ' ' || NEW.comment);
END
""",
"""
INSERT INTO piece_search
(id, body)
SELECT piece.id AS id,
piece.title || ' ' ||
piece.subtitle || ' ' ||
piece.comment || ' ' ||
COALESCE(GROUP_CONCAT(composer.given_names || ' ' || composer.surname), '') || ' ' ||
COALESCE(GROUP_CONCAT(arranger.given_names || ' ' || arranger.surname), '') AS body
FROM library_piece AS piece
LEFT JOIN library_piece_composer_artists AS pc ON piece.id = pc.piece_id
LEFT JOIN library_piece_arranger_artists AS pa ON piece.id = pa.piece_id
LEFT JOIN library_artist AS composer ON pc.artist_id = composer.id
LEFT JOIN library_artist AS arranger ON pa.artist_id = arranger.id
GROUP BY piece.id
""",
]
class Migration(migrations.Migration):
dependencies = [
('library', '0012_fix_fts_delete'),
]
operations = [
]
|
<commit_before><commit_msg>[WIP] Update sqlite fts to fts5
I think this will provide better stemming, e.g. searching for "star" will match
"Stars and Stripes Forever", which it currently doesn't.<commit_after>
|
# Generated by Django 2.1.11 on 2019-08-07 21:43
from django.db import migrations
FORWARDS = [
"""DROP TABLE piece_search""",
"""
CREATE VIRTUAL TABLE piece_search
USING fts5 (id unindexed, body, tokenize = 'porter unicode61')
""",
"""
CREATE TRIGGER library_piece_search_insert
AFTER INSERT ON library_piece
FOR EACH ROW
BEGIN
INSERT INTO piece_search (id, body)
VALUES (NEW.id, NEW.title || ' ' || NEW.subtitle || ' ' || NEW.comment);
END
""",
"""
INSERT INTO piece_search
(id, body)
SELECT piece.id AS id,
piece.title || ' ' ||
piece.subtitle || ' ' ||
piece.comment || ' ' ||
COALESCE(GROUP_CONCAT(composer.given_names || ' ' || composer.surname), '') || ' ' ||
COALESCE(GROUP_CONCAT(arranger.given_names || ' ' || arranger.surname), '') AS body
FROM library_piece AS piece
LEFT JOIN library_piece_composer_artists AS pc ON piece.id = pc.piece_id
LEFT JOIN library_piece_arranger_artists AS pa ON piece.id = pa.piece_id
LEFT JOIN library_artist AS composer ON pc.artist_id = composer.id
LEFT JOIN library_artist AS arranger ON pa.artist_id = arranger.id
GROUP BY piece.id
""",
]
class Migration(migrations.Migration):
dependencies = [
('library', '0012_fix_fts_delete'),
]
operations = [
]
|
[WIP] Update sqlite fts to fts5
I think this will provide better stemming, e.g. searching for "star" will match
"Stars and Stripes Forever", which it currently doesn't.# Generated by Django 2.1.11 on 2019-08-07 21:43
from django.db import migrations
FORWARDS = [
"""DROP TABLE piece_search""",
"""
CREATE VIRTUAL TABLE piece_search
USING fts5 (id unindexed, body, tokenize = 'porter unicode61')
""",
"""
CREATE TRIGGER library_piece_search_insert
AFTER INSERT ON library_piece
FOR EACH ROW
BEGIN
INSERT INTO piece_search (id, body)
VALUES (NEW.id, NEW.title || ' ' || NEW.subtitle || ' ' || NEW.comment);
END
""",
"""
INSERT INTO piece_search
(id, body)
SELECT piece.id AS id,
piece.title || ' ' ||
piece.subtitle || ' ' ||
piece.comment || ' ' ||
COALESCE(GROUP_CONCAT(composer.given_names || ' ' || composer.surname), '') || ' ' ||
COALESCE(GROUP_CONCAT(arranger.given_names || ' ' || arranger.surname), '') AS body
FROM library_piece AS piece
LEFT JOIN library_piece_composer_artists AS pc ON piece.id = pc.piece_id
LEFT JOIN library_piece_arranger_artists AS pa ON piece.id = pa.piece_id
LEFT JOIN library_artist AS composer ON pc.artist_id = composer.id
LEFT JOIN library_artist AS arranger ON pa.artist_id = arranger.id
GROUP BY piece.id
""",
]
class Migration(migrations.Migration):
dependencies = [
('library', '0012_fix_fts_delete'),
]
operations = [
]
|
<commit_before><commit_msg>[WIP] Update sqlite fts to fts5
I think this will provide better stemming, e.g. searching for "star" will match
"Stars and Stripes Forever", which it currently doesn't.<commit_after># Generated by Django 2.1.11 on 2019-08-07 21:43
from django.db import migrations
FORWARDS = [
"""DROP TABLE piece_search""",
"""
CREATE VIRTUAL TABLE piece_search
USING fts5 (id unindexed, body, tokenize = 'porter unicode61')
""",
"""
CREATE TRIGGER library_piece_search_insert
AFTER INSERT ON library_piece
FOR EACH ROW
BEGIN
INSERT INTO piece_search (id, body)
VALUES (NEW.id, NEW.title || ' ' || NEW.subtitle || ' ' || NEW.comment);
END
""",
"""
INSERT INTO piece_search
(id, body)
SELECT piece.id AS id,
piece.title || ' ' ||
piece.subtitle || ' ' ||
piece.comment || ' ' ||
COALESCE(GROUP_CONCAT(composer.given_names || ' ' || composer.surname), '') || ' ' ||
COALESCE(GROUP_CONCAT(arranger.given_names || ' ' || arranger.surname), '') AS body
FROM library_piece AS piece
LEFT JOIN library_piece_composer_artists AS pc ON piece.id = pc.piece_id
LEFT JOIN library_piece_arranger_artists AS pa ON piece.id = pa.piece_id
LEFT JOIN library_artist AS composer ON pc.artist_id = composer.id
LEFT JOIN library_artist AS arranger ON pa.artist_id = arranger.id
GROUP BY piece.id
""",
]
class Migration(migrations.Migration):
dependencies = [
('library', '0012_fix_fts_delete'),
]
operations = [
]
|
|
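Note that in the [WIP] migration above the operations list is still empty, so the FORWARDS statements are never executed. One plausible way to wire them up, sketched here rather than taken from the commit, is one RunSQL per statement with a no-op reverse:

from django.db import migrations

FORWARDS = ['DROP TABLE piece_search']   # plus the remaining statements listed above

class Migration(migrations.Migration):
    dependencies = [('library', '0012_fix_fts_delete')]
    operations = [migrations.RunSQL(sql, migrations.RunSQL.noop) for sql in FORWARDS]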
9f0c286f3118236aa760208cf60b2bcc19b3185e
|
projects/migrations/0003_auto_20151120_2037.py
|
projects/migrations/0003_auto_20151120_2037.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20151117_1710'),
]
operations = [
migrations.AlterModelOptions(
name='inlistitem',
options={'ordering': ('pk',)},
),
]
|
Add migration to go with 331ce5f
|
Add migration to go with 331ce5f
|
Python
|
mit
|
XeryusTC/projman,XeryusTC/projman,XeryusTC/projman
|
Add migration to go with 331ce5f
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20151117_1710'),
]
operations = [
migrations.AlterModelOptions(
name='inlistitem',
options={'ordering': ('pk',)},
),
]
|
<commit_before><commit_msg>Add migration to go with 331ce5f<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20151117_1710'),
]
operations = [
migrations.AlterModelOptions(
name='inlistitem',
options={'ordering': ('pk',)},
),
]
|
Add migration to go with 331ce5f# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20151117_1710'),
]
operations = [
migrations.AlterModelOptions(
name='inlistitem',
options={'ordering': ('pk',)},
),
]
|
<commit_before><commit_msg>Add migration to go with 331ce5f<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20151117_1710'),
]
operations = [
migrations.AlterModelOptions(
name='inlistitem',
options={'ordering': ('pk',)},
),
]
|
|
1eb9d6cbed7c8f2c9a216e8618dadee571a2cdf2
|
pyconcz_2016/speakers/views.py
|
pyconcz_2016/speakers/views.py
|
from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/speakers_list_{}.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
|
from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/{}_list.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
|
Fix template name in view
|
Fix template name in view
|
Python
|
mit
|
pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2016,pyvec/cz.pycon.org-2016,pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2016
|
from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/speakers_list_{}.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
Fix template name in view
|
from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/{}_list.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
|
<commit_before>from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/speakers_list_{}.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
<commit_msg>Fix template name in view<commit_after>
|
from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/{}_list.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
|
from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/speakers_list_{}.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
Fix template name in viewfrom django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/{}_list.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
|
<commit_before>from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/speakers_list_{}.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
<commit_msg>Fix template name in view<commit_after>from django.template import RequestContext
from django.template.response import TemplateResponse
from pyconcz_2016.speakers.models import Speaker, Slot
def speakers_list(request, type):
speakers = (Speaker.objects.all()
.exclude(**{type: None})
.prefetch_related(type)
.order_by('full_name'))
return TemplateResponse(
request,
template='speakers/{}_list.html'.format(type),
context={'speakers': speakers}
)
def talks_timeline(request):
talks = (Slot.objects.all()
.select_related('talk')
.prefetch_related('talk__speakers')
.order_by('date'))
return TemplateResponse(
request,
template='speakers/talks_timeline.html',
context={
'talks': talks
}
)
|
20ef91b51230811cab0e4edb426993cfb1c63a1a
|
osf/management/commands/checkmigrations.py
|
osf/management/commands/checkmigrations.py
|
"""
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
|
Add a management command to check for unapplied migrations.
|
Add a management command to check for unapplied migrations.
[PLAT-972]
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,aaxelb/osf.io,erinspace/osf.io,Johnetordoff/osf.io,adlius/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,felliott/osf.io,aaxelb/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,adlius/osf.io,aaxelb/osf.io,aaxelb/osf.io,felliott/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,saradbowman/osf.io,cslzchen/osf.io,mattclark/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,adlius/osf.io,mfraezz/osf.io,adlius/osf.io,pattisdr/osf.io,mfraezz/osf.io,mattclark/osf.io,cslzchen/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,felliott/osf.io,brianjgeiger/osf.io,saradbowman/osf.io
|
Add a management command to check for unapplied migrations.
[PLAT-972]
|
"""
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
|
<commit_before><commit_msg>Add a management command to check for unapplied migrations.
[PLAT-972]<commit_after>
|
"""
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
|
Add a management command to check for unapplied migrations.
[PLAT-972]"""
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
|
<commit_before><commit_msg>Add a management command to check for unapplied migrations.
[PLAT-972]<commit_after>"""
Return a non-zero exit code if there are unapplied migrations.
"""
import sys
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.executor import MigrationExecutor
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
connection = connections[DEFAULT_DB_ALIAS]
connection.prepare_database()
executor = MigrationExecutor(connection)
targets = executor.loader.graph.leaf_nodes()
unapplied_migrations = executor.migration_plan(targets)
if unapplied_migrations:
self.stdout.write('The following migrations are unapplied:', self.style.ERROR)
for migration in unapplied_migrations:
self.stdout.write(' {}.{}'.format(migration[0].app_label, migration[0].name), self.style.MIGRATE_LABEL)
sys.exit(1)
self.stdout.write('All migrations have been applied. Have a nice day!', self.style.SUCCESS)
|
|
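A command like checkmigrations above is normally run in CI as "python manage.py checkmigrations" and gated on the exit code. It can also be driven from Python via call_command, where the command's sys.exit(1) surfaces as SystemExit (this snippet assumes an already configured Django settings module):

from django.core.management import call_command

try:
    call_command('checkmigrations')
except SystemExit:
    print('unapplied migrations found')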
f8e2e26600694d5a1d9c20da4dcf2bb188269011
|
tests/test_ticketed_features.py
|
tests/test_ticketed_features.py
|
"""A growing set of tests designed to ensure when isort implements a feature described in a ticket
it fully works as defined in the associated ticket.
"""
import isort
def test_semicolon_ignored_for_dynamic_lines_after_import_issue_1178():
"""Test to ensure even if a semicolon is in the decorator in the line following an import
    the correct line spacing determination will be made.
See: https://github.com/timothycrosley/isort/issues/1178.
"""
assert isort.check_code(
"""
import pytest
@pytest.mark.skip(';')
def test_thing(): pass
""",
show_diff=True,
)
|
Add ticketed features test file
|
Add ticketed features test file
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
Add ticketed features test file
|
"""A growing set of tests designed to ensure when isort implements a feature described in a ticket
it fully works as defined in the associated ticket.
"""
import isort
def test_semicolon_ignored_for_dynamic_lines_after_import_issue_1178():
"""Test to ensure even if a semicolon is in the decorator in the line following an import
    the correct line spacing determination will be made.
See: https://github.com/timothycrosley/isort/issues/1178.
"""
assert isort.check_code(
"""
import pytest
@pytest.mark.skip(';')
def test_thing(): pass
""",
show_diff=True,
)
|
<commit_before><commit_msg>Add ticketed features test file<commit_after>
|
"""A growing set of tests designed to ensure when isort implements a feature described in a ticket
it fully works as defined in the associated ticket.
"""
import isort
def test_semicolon_ignored_for_dynamic_lines_after_import_issue_1178():
"""Test to ensure even if a semicolon is in the decorator in the line following an import
    the correct line spacing determination will be made.
See: https://github.com/timothycrosley/isort/issues/1178.
"""
assert isort.check_code(
"""
import pytest
@pytest.mark.skip(';')
def test_thing(): pass
""",
show_diff=True,
)
|
Add ticketed features test file"""A growing set of tests designed to ensure when isort implements a feature described in a ticket
it fully works as defined in the associated ticket.
"""
import isort
def test_semicolon_ignored_for_dynamic_lines_after_import_issue_1178():
"""Test to ensure even if a semicolon is in the decorator in the line following an import
    the correct line spacing determination will be made.
See: https://github.com/timothycrosley/isort/issues/1178.
"""
assert isort.check_code(
"""
import pytest
@pytest.mark.skip(';')
def test_thing(): pass
""",
show_diff=True,
)
|
<commit_before><commit_msg>Add ticketed features test file<commit_after>"""A growing set of tests designed to ensure when isort implements a feature described in a ticket
it fully works as defined in the associated ticket.
"""
import isort
def test_semicolon_ignored_for_dynamic_lines_after_import_issue_1178():
"""Test to ensure even if a semicolon is in the decorator in the line following an import
    the correct line spacing determination will be made.
See: https://github.com/timothycrosley/isort/issues/1178.
"""
assert isort.check_code(
"""
import pytest
@pytest.mark.skip(';')
def test_thing(): pass
""",
show_diff=True,
)
|
|
a27ced2b6dc5ff7a48cdb1e3c63c2caa340733a7
|
plugins/CoD_BO.py
|
plugins/CoD_BO.py
|
import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class CoDBOPlugin(BasePlugin):
Name = "Call of Duty: Black Ops"
support_os = ["Windows"]
def backup(self, _):
_.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def restore(self, _):
_.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty black ops')):
return True
return False
|
Call of Duty: Black Ops plugin
|
Call of Duty: Black Ops plugin
|
Python
|
mit
|
Pr0Ger/SGSB
|
Call of Duty: Black Ops plugin
|
import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class CoDBOPlugin(BasePlugin):
Name = "Call of Duty: Black Ops"
support_os = ["Windows"]
def backup(self, _):
_.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def restore(self, _):
_.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty black ops')):
return True
return False
|
<commit_before><commit_msg>Call of Duty: Black Ops plugin<commit_after>
|
import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class CoDBOPlugin(BasePlugin):
Name = "Call of Duty: Black Ops"
support_os = ["Windows"]
def backup(self, _):
_.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def restore(self, _):
_.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty black ops')):
return True
return False
|
Call of Duty: Black Ops pluginimport os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class CoDBOPlugin(BasePlugin):
Name = "Call of Duty: Black Ops"
support_os = ["Windows"]
def backup(self, _):
_.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def restore(self, _):
_.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty black ops')):
return True
return False
|
<commit_before><commit_msg>Call of Duty: Black Ops plugin<commit_after>import os
from lib.base_plugin import BasePlugin
from lib.paths import SteamGamesPath
class CoDBOPlugin(BasePlugin):
Name = "Call of Duty: Black Ops"
support_os = ["Windows"]
def backup(self, _):
_.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def restore(self, _):
_.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty black ops'), 'players')
def detect(self):
if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty black ops')):
return True
return False
|
|
c864253b9e7341d65e34eba8a9f7a7ea7c85c053
|
scripts/print_view_controller_hierarchy.py
|
scripts/print_view_controller_hierarchy.py
|
"""Prints the current view controller hierarchy.
Usage: pvc
"""
def print_view_controller_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[[UIWindow keyWindow] rootViewController] _printHierarchy]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_controller_hierarchy.print_view_controller_hierarchy pvc')
|
Add print view controller hierarchy command.
|
Add print view controller hierarchy command.
|
Python
|
mit
|
mrhappyasthma/happydebugging,mrhappyasthma/HappyDebugging
|
Add print view controller hierarchy command.
|
"""Prints the current view controller hierarchy.
Usage: pvc
"""
def print_view_controller_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[[UIWindow keyWindow] rootViewController] _printHierarchy]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_controller_hierarchy.print_view_controller_hierarchy pvc')
|
<commit_before><commit_msg>Add print view controller hierarchy command.<commit_after>
|
"""Prints the current view controller hierarchy.
Usage: pvc
"""
def print_view_controller_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[[UIWindow keyWindow] rootViewController] _printHierarchy]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_controller_hierarchy.print_view_controller_hierarchy pvc')
|
Add print view controller hierarchy command."""Prints the current view controller hierarchy.
Usage: pvc
"""
def print_view_controller_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[[UIWindow keyWindow] rootViewController] _printHierarchy]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_controller_hierarchy.print_view_controller_hierarchy pvc')
|
<commit_before><commit_msg>Add print view controller hierarchy command.<commit_after>"""Prints the current view controller hierarchy.
Usage: pvc
"""
def print_view_controller_hierarchy(debugger, command, result, internal_dict):
debugger.HandleCommand('po [[[UIWindow keyWindow] rootViewController] _printHierarchy]')
def __lldb_init_module(debugger, internal_dict):
debugger.HandleCommand('command script add -f print_view_controller_hierarchy.print_view_controller_hierarchy pvc')
|
|
a0b5b0c50335241be1d3ce1db53aa7113ddde81b
|
queries-limits.py
|
queries-limits.py
|
#!/usr/bin/env python
from avocado import main
from sdcm.tester import ClusterTester
from sdcm.tester import clean_aws_resources
class QueriesLimitsTest(ClusterTester):
"""
    Test that Scylla's queries limitation works, using a C++ payload as the load generator.
:avocado: enable
"""
@clean_aws_resources
def setUp(self):
self.credentials = None
self.db_cluster = None
self.loaders = None
# we will give a very slow disk to the db node
        # so the loader node will easily saturate it
bdm = [{"DeviceName": "/dev/sda1",
"Ebs": {"Iops": 100,
"VolumeType": "io1",
"DeleteOnTermination": True}}]
# Use big instance to be not throttled by the network
self.init_resources(n_db_nodes=1, n_loader_nodes=1,
dbs_block_device_mappings=bdm,
dbs_type='m4.4xlarge',
loaders_type='m4.4xlarge')
self.loaders.wait_for_init()
self.db_cluster.wait_for_init()
self.stress_thread = None
self.payload = "/tmp/payload"
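        # restrict scylla-server to 128M of memory and a single core so it is easy to overload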
self.db_cluster.run("grep -v SCYLLA_ARGS /etc/sysconfig/scylla-server > /tmp/l")
self.db_cluster.run("""echo "SCYLLA_ARGS=\"-m 128M -c 1\"" >> /tmp/l""")
self.db_cluster.run("sudo cp /tmp/l /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo chown root.root /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo systemctl stop scylla-server.service")
self.db_cluster.run("sudo systemctl start scylla-server.service")
self.loaders.run("sudo dnf install -y boost-program-options")
self.loaders.run("sudo dnf install -y libuv")
self.loaders.send_file("queries-limits", self.payload)
self.loaders.run("chmod +x " + self.payload)
def test_connection_limits(self):
ips = self.db_cluster.get_node_private_ips()
params = " --servers %s --duration 600 --queries 1000000" % (ips[0])
self.run_stress(stress_cmd=(self.payload + params), duration=10)
if __name__ == '__main__':
main()
|
Add the queries limits test.
|
limits: Add the queries limits test.
Use a C++ payload to check if the queries limitation
works.
Fixes #180.
Signed-off-by: Benoît Canet <ecd1f14f7c1c6dc7a40210bdcc3810e0107ecbc8@scylladb.com>
|
Python
|
agpl-3.0
|
amoskong/scylla-cluster-tests,scylladb/scylla-cluster-tests,scylladb/scylla-cluster-tests,amoskong/scylla-cluster-tests,amoskong/scylla-cluster-tests,scylladb/scylla-longevity-tests,scylladb/scylla-longevity-tests,amoskong/scylla-cluster-tests,scylladb/scylla-cluster-tests,scylladb/scylla-cluster-tests,scylladb/scylla-longevity-tests,amoskong/scylla-cluster-tests,scylladb/scylla-cluster-tests
|
limits: Add the queries limits test.
Use a C++ payload to check if the queries limitation
works.
Fixes #180.
Signed-off-by: Benoît Canet <ecd1f14f7c1c6dc7a40210bdcc3810e0107ecbc8@scylladb.com>
|
#!/usr/bin/env python
from avocado import main
from sdcm.tester import ClusterTester
from sdcm.tester import clean_aws_resources
class QueriesLimitsTest(ClusterTester):
"""
    Test that Scylla's queries limitation works, using a C++ payload as the load generator.
:avocado: enable
"""
@clean_aws_resources
def setUp(self):
self.credentials = None
self.db_cluster = None
self.loaders = None
# we will give a very slow disk to the db node
        # so the loader node will easily saturate it
bdm = [{"DeviceName": "/dev/sda1",
"Ebs": {"Iops": 100,
"VolumeType": "io1",
"DeleteOnTermination": True}}]
# Use big instance to be not throttled by the network
self.init_resources(n_db_nodes=1, n_loader_nodes=1,
dbs_block_device_mappings=bdm,
dbs_type='m4.4xlarge',
loaders_type='m4.4xlarge')
self.loaders.wait_for_init()
self.db_cluster.wait_for_init()
self.stress_thread = None
self.payload = "/tmp/payload"
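        # restrict scylla-server to 128M of memory and a single core so it is easy to overload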
self.db_cluster.run("grep -v SCYLLA_ARGS /etc/sysconfig/scylla-server > /tmp/l")
self.db_cluster.run("""echo "SCYLLA_ARGS=\"-m 128M -c 1\"" >> /tmp/l""")
self.db_cluster.run("sudo cp /tmp/l /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo chown root.root /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo systemctl stop scylla-server.service")
self.db_cluster.run("sudo systemctl start scylla-server.service")
self.loaders.run("sudo dnf install -y boost-program-options")
self.loaders.run("sudo dnf install -y libuv")
self.loaders.send_file("queries-limits", self.payload)
self.loaders.run("chmod +x " + self.payload)
def test_connection_limits(self):
ips = self.db_cluster.get_node_private_ips()
params = " --servers %s --duration 600 --queries 1000000" % (ips[0])
self.run_stress(stress_cmd=(self.payload + params), duration=10)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>limits: Add the queries limits test.
Use a C++ payload to check if the queries limitation
works.
Fixes #180.
Signed-off-by: Benoît Canet <ecd1f14f7c1c6dc7a40210bdcc3810e0107ecbc8@scylladb.com><commit_after>
|
#!/usr/bin/env python
from avocado import main
from sdcm.tester import ClusterTester
from sdcm.tester import clean_aws_resources
class QueriesLimitsTest(ClusterTester):
"""
    Test that Scylla's queries limitation works, using a C++ payload as the load generator.
:avocado: enable
"""
@clean_aws_resources
def setUp(self):
self.credentials = None
self.db_cluster = None
self.loaders = None
# we will give a very slow disk to the db node
        # so the loader node will easily saturate it
bdm = [{"DeviceName": "/dev/sda1",
"Ebs": {"Iops": 100,
"VolumeType": "io1",
"DeleteOnTermination": True}}]
# Use big instance to be not throttled by the network
self.init_resources(n_db_nodes=1, n_loader_nodes=1,
dbs_block_device_mappings=bdm,
dbs_type='m4.4xlarge',
loaders_type='m4.4xlarge')
self.loaders.wait_for_init()
self.db_cluster.wait_for_init()
self.stress_thread = None
self.payload = "/tmp/payload"
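        # restrict scylla-server to 128M of memory and a single core so it is easy to overload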
self.db_cluster.run("grep -v SCYLLA_ARGS /etc/sysconfig/scylla-server > /tmp/l")
self.db_cluster.run("""echo "SCYLLA_ARGS=\"-m 128M -c 1\"" >> /tmp/l""")
self.db_cluster.run("sudo cp /tmp/l /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo chown root.root /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo systemctl stop scylla-server.service")
self.db_cluster.run("sudo systemctl start scylla-server.service")
self.loaders.run("sudo dnf install -y boost-program-options")
self.loaders.run("sudo dnf install -y libuv")
self.loaders.send_file("queries-limits", self.payload)
self.loaders.run("chmod +x " + self.payload)
def test_connection_limits(self):
ips = self.db_cluster.get_node_private_ips()
params = " --servers %s --duration 600 --queries 1000000" % (ips[0])
self.run_stress(stress_cmd=(self.payload + params), duration=10)
if __name__ == '__main__':
main()
|
limits: Add the queries limits test.
Use a C++ payload to check if the queries limitation
works.
Fixes #180.
Signed-off-by: Benoît Canet <ecd1f14f7c1c6dc7a40210bdcc3810e0107ecbc8@scylladb.com>#!/usr/bin/env python
from avocado import main
from sdcm.tester import ClusterTester
from sdcm.tester import clean_aws_resources
class QueriesLimitsTest(ClusterTester):
"""
    Test that Scylla's queries limitation works, using a C++ payload as the load generator.
:avocado: enable
"""
@clean_aws_resources
def setUp(self):
self.credentials = None
self.db_cluster = None
self.loaders = None
# we will give a very slow disk to the db node
        # so the loader node will easily saturate it
bdm = [{"DeviceName": "/dev/sda1",
"Ebs": {"Iops": 100,
"VolumeType": "io1",
"DeleteOnTermination": True}}]
# Use big instance to be not throttled by the network
self.init_resources(n_db_nodes=1, n_loader_nodes=1,
dbs_block_device_mappings=bdm,
dbs_type='m4.4xlarge',
loaders_type='m4.4xlarge')
self.loaders.wait_for_init()
self.db_cluster.wait_for_init()
self.stress_thread = None
self.payload = "/tmp/payload"
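        # restrict scylla-server to 128M of memory and a single core so it is easy to overload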
self.db_cluster.run("grep -v SCYLLA_ARGS /etc/sysconfig/scylla-server > /tmp/l")
self.db_cluster.run("""echo "SCYLLA_ARGS=\"-m 128M -c 1\"" >> /tmp/l""")
self.db_cluster.run("sudo cp /tmp/l /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo chown root.root /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo systemctl stop scylla-server.service")
self.db_cluster.run("sudo systemctl start scylla-server.service")
self.loaders.run("sudo dnf install -y boost-program-options")
self.loaders.run("sudo dnf install -y libuv")
self.loaders.send_file("queries-limits", self.payload)
self.loaders.run("chmod +x " + self.payload)
def test_connection_limits(self):
ips = self.db_cluster.get_node_private_ips()
params = " --servers %s --duration 600 --queries 1000000" % (ips[0])
self.run_stress(stress_cmd=(self.payload + params), duration=10)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>limits: Add the queries limits test.
Use a C++ payload to check if the queries limitation
works.
Fixes #180.
Signed-off-by: Benoît Canet <ecd1f14f7c1c6dc7a40210bdcc3810e0107ecbc8@scylladb.com><commit_after>#!/usr/bin/env python
from avocado import main
from sdcm.tester import ClusterTester
from sdcm.tester import clean_aws_resources
class QueriesLimitsTest(ClusterTester):
"""
    Test that Scylla's queries limitation works, using a C++ payload as the load generator.
:avocado: enable
"""
@clean_aws_resources
def setUp(self):
self.credentials = None
self.db_cluster = None
self.loaders = None
# we will give a very slow disk to the db node
        # so the loader node will easily saturate it
bdm = [{"DeviceName": "/dev/sda1",
"Ebs": {"Iops": 100,
"VolumeType": "io1",
"DeleteOnTermination": True}}]
# Use big instance to be not throttled by the network
self.init_resources(n_db_nodes=1, n_loader_nodes=1,
dbs_block_device_mappings=bdm,
dbs_type='m4.4xlarge',
loaders_type='m4.4xlarge')
self.loaders.wait_for_init()
self.db_cluster.wait_for_init()
self.stress_thread = None
self.payload = "/tmp/payload"
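        # restrict scylla-server to 128M of memory and a single core so it is easy to overload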
self.db_cluster.run("grep -v SCYLLA_ARGS /etc/sysconfig/scylla-server > /tmp/l")
self.db_cluster.run("""echo "SCYLLA_ARGS=\"-m 128M -c 1\"" >> /tmp/l""")
self.db_cluster.run("sudo cp /tmp/l /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo chown root.root /etc/sysconfig/scylla-server")
self.db_cluster.run("sudo systemctl stop scylla-server.service")
self.db_cluster.run("sudo systemctl start scylla-server.service")
self.loaders.run("sudo dnf install -y boost-program-options")
self.loaders.run("sudo dnf install -y libuv")
self.loaders.send_file("queries-limits", self.payload)
self.loaders.run("chmod +x " + self.payload)
def test_connection_limits(self):
ips = self.db_cluster.get_node_private_ips()
params = " --servers %s --duration 600 --queries 1000000" % (ips[0])
self.run_stress(stress_cmd=(self.payload + params), duration=10)
if __name__ == '__main__':
main()
|
|
f065bd7933a79055b140ffd973042d62eb31d4e0
|
blue-tether.py
|
blue-tether.py
|
#!/usr/bin/env python
import dbus
import subprocess
import sys
# Replace this with the bluetooth address of the Bluetooth Network Access Point
dev_bdaddr = '12_34_56_AB_CD_EF'
# dhcp_client = ['/sbin/dhclient', '-v']
dhcp_client = ['/sbin/udhcpc', '-i']
def main():
bus = dbus.SystemBus()
bluez_proxy = bus.get_object('org.bluez', '/')
bluez_manager = dbus.Interface(bluez_proxy, 'org.bluez.Manager')
adapter = bluez_manager.DefaultAdapter()
# adapter_proxy = bus.get_object('org.bluez', adapter)
dev_proxy = bus.get_object('org.bluez', '%s/dev_%s' % (adapter, dev_bdaddr))
adapter_network = dbus.Interface(dev_proxy, 'org.bluez.Network')
# adapter_introspect = dbus.Interface(dev_proxy, 'org.freedesktop.DBus.Introspectable')
# print adapter_introspect.Introspect()
# print adapter_network.Disconnect()
net_interface = adapter_network.Connect('NAP') # 'GN' / 'NAP' ?
print '%s created' % net_interface
dhcp = subprocess.Popen(dhcp_client + [net_interface], stdout=sys.stdout)
raw_input('Press enter to close connection\n')
dhcp.kill()
dhcp.wait()
if __name__ == '__main__':
main()
|
Add script to bring up bluetooth network
|
Add script to bring up bluetooth network
<rant>
I haven't managed to find any good documentation on this (or pretty much
anything bluez 4.x related) at all (the blueman source implements this,
but it's not the easiest source in the world to follow) - this was
created after a lot of trial and error staring at the bluez dbus
interface in d-feet until I saw the bnep0 interface appear! Only for it
to disappear a moment later :(
Unfortunately, it turns out that using dbus-send is not an option
because bluez removes the bnep0 interface the moment the dbus client
goes away (WHY???). I'm beginning to think that the bluez 4.x developers
are purposefully trying to make the console experience as unusable as
possible... For the love of god would someone please implement a curses
version of blueman... Please?
</rant>
So... Here's the python version that keeps the dbus connection open
until enter is pressed so that the bnep0 interface doesn't suddenly go
away.
This is a very simple script that currently only handles a pretty
specific scenario - where the box this is run on is connecting to
another device that is acting as a bluetooth network access point and is
running a dhcp server.
I'm specifically using it to connect to my Optus MyTab (which is my
primary Internet connection at home) without having to pull up blueman
every time. I also want to use this to allow my N9 (with a voice SIM) to
easily use the Internet from the tablet when I'm out and about without
having to activate the WiFi tethering (which uses more power, is
unreliable unless something is holding a wakelock and requires prior
manual activation from the tablet).
One problem I've found using bluetooth networking is that only one
device appears to be able to connect to my tablet at a time... AFAIK
this is NOT an inherent limitation of bluetooth networking, so it's
possible I'm doing something wrong (roles?), or CM7 only allows one
device to connect (or more specifically, to DHCP) at a time... More
investigation required.
The bluetooth address of the device to connect to is currently
hard-coded in the script.
To use this on a Nokia N9 (eventually I want to package this up and
figure out how to add a plugin to the N9's network manager, but for
now):
1. Install python-dbus with:
apt-get install python-dbus
2. Remove 'network' from the 'DisablePlugins' line in:
/etc/bluetooth/main.conf
3. Restart the bluetooth daemon with:
pkill -SIGHUP bluetoothd
4. Pair to the device to connect to via the bluetooth menu in the N9
5. Edit the bluetooth address in the script
6. Run the script from the terminal under develsh:
develsh -c ./blue-tether.py
7. If using an app that does not recognise that an internet connection
is present and insists on bringing up the network manager, create a new
dummy Ad-Hoc wireless network with static IP, etc. all set to 0.0.0.0
Signed-off-by: Ian Munsie <ce389fd2fa887d610219cb85dea6dc9451973e74@gmail.com>
|
Python
|
mit
|
DarkStarSword/junk,DarkStarSword/junk,DarkStarSword/junk,DarkStarSword/junk,DarkStarSword/junk
|
Add script to bring up bluetooth network
<rant>
I haven't managed to find any good documentation on this (or pretty much
anything bluez 4.x related) at all (the blueman source implements this,
but it's not the easiest source in the world to follow) - this was
created after a lot of trial and error staring at the bluez dbus
interface in d-feet until I saw the bnep0 interface appear! Only for it
to disappear a moment later :(
Unfortunately, it turns out that using dbus-send is not an option
because bluez removes the bnep0 interface the moment the dbus client
goes away (WHY???). I'm beginning to think that the bluez 4.x developers
are purposefully trying to make the console experience as unusable as
possible... For the love of god would someone please implement a curses
version of blueman... Please?
</rant>
So... Here's the python version that keeps the dbus connection open
until enter is pressed so that the bnep0 interface doesn't suddenly go
away.
This is a very simple script that currently only handles a pretty
specific scenario - where the box this is run on is connecting to
another device that is acting as a bluetooth network access point and is
running a dhcp server.
I'm specifically using it to connect to my Optus MyTab (which is my
primary Internet connection at home) without having to pull up blueman
every time. I also want to use this to allow my N9 (with a voice SIM) to
easily use the Internet from the tablet when I'm out and about without
having to activate the WiFi tethering (which uses more power, is
unreliable unless something is holding a wakelock and requires prior
manual activation from the tablet).
One problem I've found using bluetooth networking is that only one
device appears to be able to connect to my tablet at a time... AFAIK
this is NOT an inherent limitation of bluetooth networking, so it's
possible I'm doing something wrong (roles?), or CM7 only allows one
device to connect (or more specifically, to DHCP) at a time... More
investigation required.
The bluetooth address of the device to connect to is currently
hard-coded in the script.
To use this on a Nokia N9 (eventually I want to package this up and
figure out how to add a plugin to the N9's network manager, but for
now):
1. Install python-dbus with:
apt-get install python-dbus
2. Remove 'network' from the 'DisablePlugins' line in:
/etc/bluetooth/main.conf
3. Restart the bluetooth daemon with:
pkill -SIGHUP bluetoothd
4. Pair to the device to connect to via the bluetooth menu in the N9
5. Edit the bluetooth address in the script
6. Run the script from the terminal under develsh:
develsh -c ./blue-tether.py
7. If using an app that does not recognise that an internet connection
is present and insists on bringing up the network manager, create a new
dummy Ad-Hoc wireless network with static IP, etc. all set to 0.0.0.0
Signed-off-by: Ian Munsie <ce389fd2fa887d610219cb85dea6dc9451973e74@gmail.com>
|
#!/usr/bin/env python
import dbus
import subprocess
import sys
# Replace this with the bluetooth address of the Bluetooth Network Access Point
dev_bdaddr = '12_34_56_AB_CD_EF'
# dhcp_client = ['/sbin/dhclient', '-v']
dhcp_client = ['/sbin/udhcpc', '-i']
def main():
bus = dbus.SystemBus()
bluez_proxy = bus.get_object('org.bluez', '/')
bluez_manager = dbus.Interface(bluez_proxy, 'org.bluez.Manager')
adapter = bluez_manager.DefaultAdapter()
# adapter_proxy = bus.get_object('org.bluez', adapter)
dev_proxy = bus.get_object('org.bluez', '%s/dev_%s' % (adapter, dev_bdaddr))
adapter_network = dbus.Interface(dev_proxy, 'org.bluez.Network')
# adapter_introspect = dbus.Interface(dev_proxy, 'org.freedesktop.DBus.Introspectable')
# print adapter_introspect.Introspect()
# print adapter_network.Disconnect()
net_interface = adapter_network.Connect('NAP') # 'GN' / 'NAP' ?
print '%s created' % net_interface
dhcp = subprocess.Popen(dhcp_client + [net_interface], stdout=sys.stdout)
raw_input('Press enter to close connection\n')
dhcp.kill()
dhcp.wait()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to bring up bluetooth network
<rant>
I haven't managed to find any good documentation on this (or pretty much
anything bluez 4.x related) at all (the blueman source implements this,
but it's not the easiest source in the world to follow) - this was
created after a lot of trial and error staring at the bluez dbus
interface in d-feet until I saw the bnep0 interface appear! Only for it
to disappear a moment later :(
Unfortunately, it turns out that using dbus-send is not an option
because bluez removes the bnep0 interface the moment the dbus client
goes away (WHY???). I'm beginning to think that the bluez 4.x developers
are purposefully trying to make the console experience as unusable as
possible... For the love of god would someone please implement a curses
version of blueman... Please?
</rant>
So... Here's the python version that keeps the dbus connection open
until enter is pressed so that the bnep0 interface doesn't suddenly go
away.
This is a very simple script that currently only handles a pretty
specific scenario - where the box this is run on is connecting to
another device that is acting as a bluetooth network access point and is
running a dhcp server.
I'm specifically using it to connect to my Optus MyTab (which is my
primary Internet connection at home) without having to pull up blueman
every time. I also want to use this to allow my N9 (with a voice SIM) to
easily use the Internet from the tablet when I'm out and about without
having to activate the WiFi tethering (which uses more power, is
unreliable unless something is holding a wakelock and requires prior
manual activation from the tablet).
One problem I've found using bluetooth networking is that only one
device appears to be able to connect to my tablet at a time... AFAIK
this is NOT an inherent limitation of bluetooth networking, so it's
possible I'm doing something wrong (roles?), or CM7 only allows one
device to connect (or more specifically, to DHCP) at a time... More
investigation required.
The bluetooth address of the device to connect to is currently
hard-coded in the script.
To use this on a Nokia N9 (eventually I want to package this up and
figure out how to add a plugin to the N9's network manager, but for
now):
1. Install python-dbus with:
apt-get install python-dbus
2. Remove 'network' from the 'DisablePlugins' line in:
/etc/bluetooth/main.conf
3. Restart the bluetooth daemon with:
pkill -SIGHUP bluetoothd
4. Pair to the device to connect to via the bluetooth menu in the N9
5. Edit the bluetooth address in the script
6. Run the script from the terminal under develsh:
develsh -c ./blue-tether.py
7. If using an app that does not recognise that an internet connection
is present and insists on bringing up the network manager, create a new
dummy Ad-Hoc wireless network with static IP, etc. all set to 0.0.0.0
Signed-off-by: Ian Munsie <ce389fd2fa887d610219cb85dea6dc9451973e74@gmail.com><commit_after>
|
#!/usr/bin/env python
import dbus
import subprocess
import sys
# Replace this with the bluetooth address of the Bluetooth Network Access Point
dev_bdaddr = '12_34_56_AB_CD_EF'
# dhcp_client = ['/sbin/dhclient', '-v']
dhcp_client = ['/sbin/udhcpc', '-i']
def main():
bus = dbus.SystemBus()
bluez_proxy = bus.get_object('org.bluez', '/')
bluez_manager = dbus.Interface(bluez_proxy, 'org.bluez.Manager')
adapter = bluez_manager.DefaultAdapter()
# adapter_proxy = bus.get_object('org.bluez', adapter)
dev_proxy = bus.get_object('org.bluez', '%s/dev_%s' % (adapter, dev_bdaddr))
adapter_network = dbus.Interface(dev_proxy, 'org.bluez.Network')
# adapter_introspect = dbus.Interface(dev_proxy, 'org.freedesktop.DBus.Introspectable')
# print adapter_introspect.Introspect()
# print adapter_network.Disconnect()
net_interface = adapter_network.Connect('NAP') # 'GN' / 'NAP' ?
print '%s created' % net_interface
dhcp = subprocess.Popen(dhcp_client + [net_interface], stdout=sys.stdout)
raw_input('Press enter to close connection\n')
dhcp.kill()
dhcp.wait()
if __name__ == '__main__':
main()
|
Add script to bring up bluetooth network
<rant>
I haven't managed to find any good documentation on this (or pretty much
anything bluez 4.x related) at all (the blueman source implements this,
but it's not the easiest source in the world to follow) - this was
created after a lot of trial and error staring at the bluez dbus
interface in d-feet until I saw the bnep0 interface appear! Only for it
to disappear a moment later :(
Unfortunately, it turns out that using dbus-send is not an option
because bluez removes the bnep0 interface the moment the dbus client
goes away (WHY???). I'm beginning to think that the bluez 4.x developers
are purposefully trying to make the console experience as unusable as
possible... For the love of god would someone please implement a curses
version of blueman... Please?
</rant>
So... Here's the python version that keeps the dbus connection open
until enter is pressed so that the bnep0 interface doesn't suddenly go
away.
This is a very simple script that currently only handles a pretty
specific scenario - where the box this is run on is connecting to
another device that is acting as a bluetooth network access point and is
running a dhcp server.
I'm specifically using it to connect to my Optus MyTab (which is my
primary Internet connection at home) without having to pull up blueman
every time. I also want to use this to allow my N9 (with a voice SIM) to
easily use the Internet from the tablet when I'm out and about without
having to activate the WiFi tethering (which uses more power, is
unreliable unless something is holding a wakelock and requires prior
manual activation from the tablet).
One problem I've found using bluetooth networking is that only one
device appears to be able to connect to my tablet at a time... AFAIK
this is NOT an inherent limitation of bluetooth networking, so it's
possible I'm doing something wrong (roles?), or CM7 only allows one
device to connect (or more specifically, to DHCP) at a time... More
investigation required.
The bluetooth address of the device to connect to is currently
hard-coded in the script.
To use this on a Nokia N9 (eventually I want to package this up and
figure out how to add a plugin to the N9's network manager, but for
now):
1. Install python-dbus with:
apt-get install python-dbus
2. Remove 'network' from the 'DisablePlugins' line in:
/etc/bluetooth/main.conf
3. Restart the bluetooth daemon with:
pkill -SIGHUP bluetoothd
4. Pair to the device to connect to via the bluetooth menu in the N9
5. Edit the bluetooth address in the script
6. Run the script from the terminal under develsh:
develsh -c ./blue-tether.py
7. If using an app that does not recognise that an internet connection
is present and insists on bringing up the network manager, create a new
dummy Ad-Hoc wireless network with static IP, etc. all set to 0.0.0.0
Signed-off-by: Ian Munsie <ce389fd2fa887d610219cb85dea6dc9451973e74@gmail.com>#!/usr/bin/env python
import dbus
import subprocess
import sys
# Replace this with the bluetooth address of the Bluetooth Network Access Point
dev_bdaddr = '12_34_56_AB_CD_EF'
# dhcp_client = ['/sbin/dhclient', '-v']
dhcp_client = ['/sbin/udhcpc', '-i']
def main():
bus = dbus.SystemBus()
bluez_proxy = bus.get_object('org.bluez', '/')
bluez_manager = dbus.Interface(bluez_proxy, 'org.bluez.Manager')
adapter = bluez_manager.DefaultAdapter()
# adapter_proxy = bus.get_object('org.bluez', adapter)
dev_proxy = bus.get_object('org.bluez', '%s/dev_%s' % (adapter, dev_bdaddr))
adapter_network = dbus.Interface(dev_proxy, 'org.bluez.Network')
# adapter_introspect = dbus.Interface(dev_proxy, 'org.freedesktop.DBus.Introspectable')
# print adapter_introspect.Introspect()
# print adapter_network.Disconnect()
net_interface = adapter_network.Connect('NAP') # 'GN' / 'NAP' ?
print '%s created' % net_interface
dhcp = subprocess.Popen(dhcp_client + [net_interface], stdout=sys.stdout)
raw_input('Press enter to close connection\n')
dhcp.kill()
dhcp.wait()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to bring up bluetooth network
<rant>
I haven't managed to find any good documentation on this (or pretty much
anything bluez 4.x related) at all (the blueman source implements this,
but it's not the easiest source in the world to follow) - this was
created after a lot of trial and error staring at the bluez dbus
interface in d-feet until I saw the bnep0 interface appear! Only for it
to disappear a moment later :(
Unfortunately, it turns out that using dbus-send is not an option
because bluez removes the bnep0 interface the moment the dbus client
goes away (WHY???). I'm beginning to think that the bluez 4.x developers
are purposefully trying to make the console experience as unusable as
possible... For the love of god would someone please implement a curses
version of blueman... Please?
</rant>
So... Here's the python version that keeps the dbus connection open
until enter is pressed so that the bnep0 interface doesn't suddenly go
away.
This is a very simple script that currently only handles a pretty
specific scenario - where the box this is run on is connecting to
another device that is acting as a bluetooth network access point and is
running a dhcp server.
I'm specifically using it to connect to my Optus MyTab (which is my
primary Internet connection at home) without having to pull up blueman
every time. I also want to use this to allow my N9 (with a voice SIM) to
easily use the Internet from the tablet when I'm out and about without
having to activate the WiFi tethering (which uses more power, is
unreliable unless something is holding a wakelock and requires prior
manual activation from the tablet).
One problem I've found using bluetooth networking is that only one
device appears to be able to connect to my tablet at a time... AFAIK
this is NOT an inherent limitation of bluetooth networking, so it's
possible I'm doing something wrong (roles?), or CM7 only allows one
device to connect (or more specifically, to DHCP) at a time... More
investigation required.
The bluetooth address of the device to connect to is currently
hard-coded in the script.
To use this on a Nokia N9 (eventually I want to package this up and
figure out how to add a plugin to the N9's network manager, but for
now):
1. Install python-dbus with:
apt-get install python-dbus
2. Remove 'network' from the 'DisablePlugins' line in:
/etc/bluetooth/main.conf
3. Restart the bluetooth daemon with:
pkill -SIGHUP bluetoothd
4. Pair to the device to connect to via the bluetooth menu in the N9
5. Edit the bluetooth address in the script
6. Run the script from the terminal under develsh:
develsh -c ./blue-tether.py
7. If using an app that does not recognise that an internet connection
is present and insists on bringing up the network manager, create a new
dummy Ad-Hoc wireless network with static IP, etc. all set to 0.0.0.0
Signed-off-by: Ian Munsie <ce389fd2fa887d610219cb85dea6dc9451973e74@gmail.com><commit_after>#!/usr/bin/env python
import dbus
import subprocess
import sys
# Replace this with the bluetooth address of the Bluetooth Network Access Point
dev_bdaddr = '12_34_56_AB_CD_EF'
# dhcp_client = ['/sbin/dhclient', '-v']
dhcp_client = ['/sbin/udhcpc', '-i']
def main():
bus = dbus.SystemBus()
bluez_proxy = bus.get_object('org.bluez', '/')
bluez_manager = dbus.Interface(bluez_proxy, 'org.bluez.Manager')
adapter = bluez_manager.DefaultAdapter()
# adapter_proxy = bus.get_object('org.bluez', adapter)
dev_proxy = bus.get_object('org.bluez', '%s/dev_%s' % (adapter, dev_bdaddr))
adapter_network = dbus.Interface(dev_proxy, 'org.bluez.Network')
# adapter_introspect = dbus.Interface(dev_proxy, 'org.freedesktop.DBus.Introspectable')
# print adapter_introspect.Introspect()
# print adapter_network.Disconnect()
net_interface = adapter_network.Connect('NAP') # 'GN' / 'NAP' ?
print '%s created' % net_interface
dhcp = subprocess.Popen(dhcp_client + [net_interface], stdout=sys.stdout)
raw_input('Press enter to close connection\n')
dhcp.kill()
dhcp.wait()
if __name__ == '__main__':
main()
|
|
395d534e44a5318e221f41d1fefbea3c10dc73f5
|
in-class-code/2017-03-06-simulatingKinematics.py
|
in-class-code/2017-03-06-simulatingKinematics.py
|
### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # starts at t=0, so the loop records a second entry at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
    'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
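    # semi-implicit Euler: update velocity first, then use the new velocity for position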
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
|
Add in-class code for simulating motion
|
Add in-class code for simulating motion
|
Python
|
agpl-3.0
|
ComputationalModeling/spring-2017-danielak,ComputationalModeling/spring-2017-danielak,ComputationalModeling/spring-2017-danielak,ComputationalModeling/spring-2017-danielak
|
Add in-class code for simulating motion
|
### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # starts at t=0, so the loop records a second entry at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
    'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
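    # semi-implicit Euler: update velocity first, then use the new velocity for position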
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
|
<commit_before><commit_msg>Add in-class code for simulating motion<commit_after>
|
### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # starts at t=0, so the loop records a second entry at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
    'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
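    # semi-implicit Euler: update velocity first, then use the new velocity for position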
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
|
Add in-class code for simulating motion### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # starts at t=0, so the loop records a second entry at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
    'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
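    # semi-implicit Euler: update velocity first, then use the new velocity for position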
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
|
<commit_before><commit_msg>Add in-class code for simulating motion<commit_after>### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # starts at t=0, so the loop records a second entry at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
    'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
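    # semi-implicit Euler: update velocity first, then use the new velocity for position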
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
|
|
21388952e9cd7e9d4259d63843a7ff782aa09ca2
|
tests/func/test_version.py
|
tests/func/test_version.py
|
from dvc.main import main
from dvc.command.version import CmdVersion
from dvc.cli import parse_args
from tests.basic_env import TestDvc
class TestVersion(TestDvc):
def test_run(self):
ret = main(["version"])
self.assertEqual(ret, 0)
def test_info(dvc, mocker, caplog):
cmd = CmdVersion(parse_args(["version"]))
cmd.run()
for record in caplog.records:
assert "DVC version" in record.msg
assert "Python version" in record.msg
assert "Platform" in record.msg
|
Write tests for `dvc version`
|
Write tests for `dvc version`
|
Python
|
apache-2.0
|
dmpetrov/dataversioncontrol,efiop/dvc,dmpetrov/dataversioncontrol,efiop/dvc
|
Write tests for `dvc version`
|
from dvc.main import main
from dvc.command.version import CmdVersion
from dvc.cli import parse_args
from tests.basic_env import TestDvc
class TestVersion(TestDvc):
def test_run(self):
ret = main(["version"])
self.assertEqual(ret, 0)
def test_info(dvc, mocker, caplog):
cmd = CmdVersion(parse_args(["version"]))
cmd.run()
for record in caplog.records:
assert "DVC version" in record.msg
assert "Python version" in record.msg
assert "Platform" in record.msg
|
<commit_before><commit_msg>Write tests for `dvc version`<commit_after>
|
from dvc.main import main
from dvc.command.version import CmdVersion
from dvc.cli import parse_args
from tests.basic_env import TestDvc
class TestVersion(TestDvc):
def test_run(self):
ret = main(["version"])
self.assertEqual(ret, 0)
def test_info(dvc, mocker, caplog):
cmd = CmdVersion(parse_args(["version"]))
cmd.run()
for record in caplog.records:
assert "DVC version" in record.msg
assert "Python version" in record.msg
assert "Platform" in record.msg
|
Write tests for `dvc version`from dvc.main import main
from dvc.command.version import CmdVersion
from dvc.cli import parse_args
from tests.basic_env import TestDvc
class TestVersion(TestDvc):
def test_run(self):
ret = main(["version"])
self.assertEqual(ret, 0)
def test_info(dvc, mocker, caplog):
cmd = CmdVersion(parse_args(["version"]))
cmd.run()
for record in caplog.records:
assert "DVC version" in record.msg
assert "Python version" in record.msg
assert "Platform" in record.msg
|
<commit_before><commit_msg>Write tests for `dvc version`<commit_after>from dvc.main import main
from dvc.command.version import CmdVersion
from dvc.cli import parse_args
from tests.basic_env import TestDvc
class TestVersion(TestDvc):
def test_run(self):
ret = main(["version"])
self.assertEqual(ret, 0)
def test_info(dvc, mocker, caplog):
cmd = CmdVersion(parse_args(["version"]))
cmd.run()
for record in caplog.records:
assert "DVC version" in record.msg
assert "Python version" in record.msg
assert "Platform" in record.msg
|
|
beba0e2c9a80ce8b2bddf2e8470ad7161be097d1
|
bloatfinder.py
|
bloatfinder.py
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This prints out the tests that open the most windows.
import sys
test = None
count = 0
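# counts maps a ++DOMWINDOW total to the list of tests that opened that many windows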
counts = {}
for l in sys.stdin:
if l.find("TEST-START") > -1:
if test:
counts.setdefault(count, []).append(test)
count = 0
test = l.split('|')[1].strip()
if l.find("++DOMWINDOW") > -1:
count += 1
keys = sorted(counts.keys())
keys.reverse()
for k in keys[:10]:
print k, ', '.join(counts[k])
|
Add script to find tests that open a lot of windows
|
Add script to find tests that open a lot of windows
|
Python
|
mpl-2.0
|
amccreight/mochitest-logs
|
Add script to find tests that open a lot of windows
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This prints out the tests that open the most windows.
import sys
test = None
count = 0
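# counts maps a ++DOMWINDOW total to the list of tests that opened that many windows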
counts = {}
for l in sys.stdin:
if l.find("TEST-START") > -1:
if test:
counts.setdefault(count, []).append(test)
count = 0
test = l.split('|')[1].strip()
if l.find("++DOMWINDOW") > -1:
count += 1
keys = sorted(counts.keys())
keys.reverse()
for k in keys[:10]:
print k, ', '.join(counts[k])
|
<commit_before><commit_msg>Add script to find tests that open a lot of windows<commit_after>
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This prints out the tests that open the most windows.
import sys
test = None
count = 0
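# counts maps a ++DOMWINDOW total to the list of tests that opened that many windows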
counts = {}
for l in sys.stdin:
if l.find("TEST-START") > -1:
if test:
counts.setdefault(count, []).append(test)
count = 0
test = l.split('|')[1].strip()
if l.find("++DOMWINDOW") > -1:
count += 1
keys = sorted(counts.keys())
keys.reverse()
for k in keys[:10]:
print k, ', '.join(counts[k])
|
Add script to find tests that open a lot of windows#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This prints out the tests that open the most windows.
import sys
test = None
count = 0
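# counts maps a ++DOMWINDOW total to the list of tests that opened that many windows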
counts = {}
for l in sys.stdin:
if l.find("TEST-START") > -1:
if test:
counts.setdefault(count, []).append(test)
count = 0
test = l.split('|')[1].strip()
if l.find("++DOMWINDOW") > -1:
count += 1
keys = sorted(counts.keys())
keys.reverse()
for k in keys[:10]:
print k, ', '.join(counts[k])
|
<commit_before><commit_msg>Add script to find tests that open a lot of windows<commit_after>#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This prints out the tests that open the most windows.
import sys
test = None
count = 0
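# counts maps a ++DOMWINDOW total to the list of tests that opened that many windows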
counts = {}
for l in sys.stdin:
if l.find("TEST-START") > -1:
if test:
counts.setdefault(count, []).append(test)
count = 0
test = l.split('|')[1].strip()
if l.find("++DOMWINDOW") > -1:
count += 1
keys = sorted(counts.keys())
keys.reverse()
for k in keys[:10]:
print k, ', '.join(counts[k])
|
|
143e09c5e62435c91a4c7e20a963bfc26451f517
|
tests/app/soc/modules/gci/views/test_org_home.py
|
tests/app/soc/modules/gci/views/test_org_home.py
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI org homepage.
"""
__authors__ = [
'"Praveen Kumar" <praveen97uma@gmail.com>',
]
from soc.modules.gci.models.organization import GCIOrganization
from soc.modules.gci.models.task import GCITask
from tests.test_utils import GCIDjangoTestCase
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class OrgHomeTest(GCIDjangoTestCase):
"""Tests the GCI org homepage.
"""
def setUp(self):
self.init()
self.url = '/gci/org/' + self.org.key().name()
def assertTemplatesUsed(self, response):
"""Asserts if all the templates required to correctly render the page
were used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/base.html')
self.assertTemplateUsed(
response, 'v2/modules/gci/org_home/_open_tasks.html')
self.assertTemplateUsed(
response, "v2/modules/gci/org_home/_contact_us.html")
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/_about_us.html')
self.assertTemplateUsed(response, 'v2/soc/list/lists.html')
self.assertTemplateUsed(response, 'v2/soc/list/list.html')
def testOpenTasksList(self):
"""Tests if the list of open tasks is rendered.
"""
task_prop = {'status': 'Open', 'program': self.gci, 'org': self.org}
seeder_logic.seed(GCITask, task_prop)
seeder_logic.seed(GCITask, task_prop)
response = self.get(self.url)
self.assertResponseOK(response)
self.assertTemplatesUsed(response)
#list = self.getListResponse(self.url, idx=0)
#print list
list_data = self.getListData(self.url, 0)
self.assertEqual(len(list_data), 2)
|
Add tests for org_home view.
|
Add tests for org_home view.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
Add tests for org_home view.
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI org homepage.
"""
__authors__ = [
'"Praveen Kumar" <praveen97uma@gmail.com>',
]
from soc.modules.gci.models.organization import GCIOrganization
from soc.modules.gci.models.task import GCITask
from tests.test_utils import GCIDjangoTestCase
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class OrgHomeTest(GCIDjangoTestCase):
"""Tests the GCI org homepage.
"""
def setUp(self):
self.init()
self.url = '/gci/org/' + self.org.key().name()
def assertTemplatesUsed(self, response):
"""Asserts if all the templates required to correctly render the page
were used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/base.html')
self.assertTemplateUsed(
response, 'v2/modules/gci/org_home/_open_tasks.html')
self.assertTemplateUsed(
response, "v2/modules/gci/org_home/_contact_us.html")
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/_about_us.html')
self.assertTemplateUsed(response, 'v2/soc/list/lists.html')
self.assertTemplateUsed(response, 'v2/soc/list/list.html')
def testOpenTasksList(self):
"""Tests if the list of open tasks is rendered.
"""
task_prop = {'status': 'Open', 'program': self.gci, 'org': self.org}
seeder_logic.seed(GCITask, task_prop)
seeder_logic.seed(GCITask, task_prop)
response = self.get(self.url)
self.assertResponseOK(response)
self.assertTemplatesUsed(response)
#list = self.getListResponse(self.url, idx=0)
#print list
list_data = self.getListData(self.url, 0)
self.assertEqual(len(list_data), 2)
|
<commit_before><commit_msg>Add tests for org_home view.<commit_after>
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI org homepage.
"""
__authors__ = [
'"Praveen Kumar" <praveen97uma@gmail.com>',
]
from soc.modules.gci.models.organization import GCIOrganization
from soc.modules.gci.models.task import GCITask
from tests.test_utils import GCIDjangoTestCase
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class OrgHomeTest(GCIDjangoTestCase):
"""Tests the GCI org homepage.
"""
def setUp(self):
self.init()
self.url = '/gci/org/' + self.org.key().name()
def assertTemplatesUsed(self, response):
"""Asserts if all the templates required to correctly render the page
were used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/base.html')
self.assertTemplateUsed(
response, 'v2/modules/gci/org_home/_open_tasks.html')
self.assertTemplateUsed(
response, "v2/modules/gci/org_home/_contact_us.html")
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/_about_us.html')
self.assertTemplateUsed(response, 'v2/soc/list/lists.html')
self.assertTemplateUsed(response, 'v2/soc/list/list.html')
def testOpenTasksList(self):
"""Tests if the list of open tasks is rendered.
"""
task_prop = {'status': 'Open', 'program': self.gci, 'org': self.org}
seeder_logic.seed(GCITask, task_prop)
seeder_logic.seed(GCITask, task_prop)
response = self.get(self.url)
self.assertResponseOK(response)
self.assertTemplatesUsed(response)
#list = self.getListResponse(self.url, idx=0)
#print list
list_data = self.getListData(self.url, 0)
self.assertEqual(len(list_data), 2)
|
Add tests for org_home view.#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI org homepage.
"""
__authors__ = [
'"Praveen Kumar" <praveen97uma@gmail.com>',
]
from soc.modules.gci.models.organization import GCIOrganization
from soc.modules.gci.models.task import GCITask
from tests.test_utils import GCIDjangoTestCase
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class OrgHomeTest(GCIDjangoTestCase):
"""Tests the GCI org homepage.
"""
def setUp(self):
self.init()
self.url = '/gci/org/' + self.org.key().name()
def assertTemplatesUsed(self, response):
"""Asserts if all the templates required to correctly render the page
were used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/base.html')
self.assertTemplateUsed(
response, 'v2/modules/gci/org_home/_open_tasks.html')
self.assertTemplateUsed(
response, "v2/modules/gci/org_home/_contact_us.html")
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/_about_us.html')
self.assertTemplateUsed(response, 'v2/soc/list/lists.html')
self.assertTemplateUsed(response, 'v2/soc/list/list.html')
def testOpenTasksList(self):
"""Tests if the list of open tasks is rendered.
"""
task_prop = {'status': 'Open', 'program': self.gci, 'org': self.org}
seeder_logic.seed(GCITask, task_prop)
seeder_logic.seed(GCITask, task_prop)
response = self.get(self.url)
self.assertResponseOK(response)
self.assertTemplatesUsed(response)
#list = self.getListResponse(self.url, idx=0)
#print list
list_data = self.getListData(self.url, 0)
self.assertEqual(len(list_data), 2)
|
<commit_before><commit_msg>Add tests for org_home view.<commit_after>#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the view for GCI org homepage.
"""
__authors__ = [
'"Praveen Kumar" <praveen97uma@gmail.com>',
]
from soc.modules.gci.models.organization import GCIOrganization
from soc.modules.gci.models.task import GCITask
from tests.test_utils import GCIDjangoTestCase
from soc.modules.seeder.logic.seeder import logic as seeder_logic
class OrgHomeTest(GCIDjangoTestCase):
"""Tests the GCI org homepage.
"""
def setUp(self):
self.init()
self.url = '/gci/org/' + self.org.key().name()
def assertTemplatesUsed(self, response):
"""Asserts if all the templates required to correctly render the page
were used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/base.html')
self.assertTemplateUsed(
response, 'v2/modules/gci/org_home/_open_tasks.html')
self.assertTemplateUsed(
response, "v2/modules/gci/org_home/_contact_us.html")
self.assertTemplateUsed(response, 'v2/modules/gci/org_home/_about_us.html')
self.assertTemplateUsed(response, 'v2/soc/list/lists.html')
self.assertTemplateUsed(response, 'v2/soc/list/list.html')
def testOpenTasksList(self):
"""Tests if the list of open tasks is rendered.
"""
task_prop = {'status': 'Open', 'program': self.gci, 'org': self.org}
seeder_logic.seed(GCITask, task_prop)
seeder_logic.seed(GCITask, task_prop)
response = self.get(self.url)
self.assertResponseOK(response)
self.assertTemplatesUsed(response)
#list = self.getListResponse(self.url, idx=0)
#print list
list_data = self.getListData(self.url, 0)
self.assertEqual(len(list_data), 2)
|
|
b958f2b327d54b55073cafa7f733b950f64ee075
|
tohu/custom_generator_v2.py
|
tohu/custom_generator_v2.py
|
import logging
logger = logging.getLogger('tohu')
class CustomGeneratorMetaV2(type):
def __new__(metacls, cg_name, bases, clsdict):
logger.debug('[DDD] CustomGeneratorMetaV2.__new__')
logger.debug(f' - metacls={metacls}')
logger.debug(f' - cg_name={cg_name}')
logger.debug(f' - bases={bases}')
logger.debug(f' - clsdict={clsdict}')
new_obj = super(CustomGeneratorMetaV2, metacls).__new__(metacls, cg_name, bases, clsdict)
logger.debug(f' -- new_obj={new_obj}')
def reset(self, seed=None):
logger.debug(f'[EEE] Inside automatically generated reset() method for {self} (seed={seed})')
new_obj.reset = reset
return new_obj
|
Add stub for CustomGeneratorMetaV2 class (which will be a fresh and decluttered implementation of CustomGeneratorMeta)
|
Add stub for CustomGeneratorMetaV2 class (which will be a fresh and decluttered implementation of CustomGeneratorMeta)
|
Python
|
mit
|
maxalbert/tohu
|
Add stub for CustomGeneratorMetaV2 class (which will be a fresh and decluttered implementation of CustomGeneratorMeta)
|
import logging
logger = logging.getLogger('tohu')
class CustomGeneratorMetaV2(type):
def __new__(metacls, cg_name, bases, clsdict):
logger.debug('[DDD] CustomGeneratorMetaV2.__new__')
logger.debug(f' - metacls={metacls}')
logger.debug(f' - cg_name={cg_name}')
logger.debug(f' - bases={bases}')
logger.debug(f' - clsdict={clsdict}')
new_obj = super(CustomGeneratorMetaV2, metacls).__new__(metacls, cg_name, bases, clsdict)
logger.debug(f' -- new_obj={new_obj}')
def reset(self, seed=None):
logger.debug(f'[EEE] Inside automatically generated reset() method for {self} (seed={seed})')
new_obj.reset = reset
return new_obj
|
<commit_before><commit_msg>Add stub for CustomGeneratorMetaV2 class (which will be a fresh and decluttered implementation of CustomGeneratorMeta)<commit_after>
|
import logging
logger = logging.getLogger('tohu')
class CustomGeneratorMetaV2(type):
def __new__(metacls, cg_name, bases, clsdict):
logger.debug('[DDD] CustomGeneratorMetaV2.__new__')
logger.debug(f' - metacls={metacls}')
logger.debug(f' - cg_name={cg_name}')
logger.debug(f' - bases={bases}')
logger.debug(f' - clsdict={clsdict}')
new_obj = super(CustomGeneratorMetaV2, metacls).__new__(metacls, cg_name, bases, clsdict)
logger.debug(f' -- new_obj={new_obj}')
def reset(self, seed=None):
logger.debug(f'[EEE] Inside automatically generated reset() method for {self} (seed={seed})')
new_obj.reset = reset
return new_obj
|
Add stub for CustomGeneratorMetaV2 class (which will be a fresh and decluttered implementation of CustomGeneratorMeta)import logging
logger = logging.getLogger('tohu')
class CustomGeneratorMetaV2(type):
def __new__(metacls, cg_name, bases, clsdict):
logger.debug('[DDD] CustomGeneratorMetaV2.__new__')
logger.debug(f' - metacls={metacls}')
logger.debug(f' - cg_name={cg_name}')
logger.debug(f' - bases={bases}')
logger.debug(f' - clsdict={clsdict}')
new_obj = super(CustomGeneratorMetaV2, metacls).__new__(metacls, cg_name, bases, clsdict)
logger.debug(f' -- new_obj={new_obj}')
def reset(self, seed=None):
logger.debug(f'[EEE] Inside automatically generated reset() method for {self} (seed={seed})')
new_obj.reset = reset
return new_obj
|
<commit_before><commit_msg>Add stub for CustomGeneratorMetaV2 class (which will be a fresh and decluttered implementation of CustomGeneratorMeta)<commit_after>import logging
logger = logging.getLogger('tohu')
class CustomGeneratorMetaV2(type):
def __new__(metacls, cg_name, bases, clsdict):
logger.debug('[DDD] CustomGeneratorMetaV2.__new__')
logger.debug(f' - metacls={metacls}')
logger.debug(f' - cg_name={cg_name}')
logger.debug(f' - bases={bases}')
logger.debug(f' - clsdict={clsdict}')
new_obj = super(CustomGeneratorMetaV2, metacls).__new__(metacls, cg_name, bases, clsdict)
logger.debug(f' -- new_obj={new_obj}')
def reset(self, seed=None):
logger.debug(f'[EEE] Inside automatically generated reset() method for {self} (seed={seed})')
new_obj.reset = reset
return new_obj
|
|
3359e4325fe30f883476cdeed6411a063de925af
|
filters/lerp.py
|
filters/lerp.py
|
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
f, (ax1, ax2) = plt.subplots(2, sharex=True)
for i in np.linspace(0.0, 1.0, 11):
# A linear interpolation filter with a factor of `i`.
w, h = signal.freqz([(1.0 - i), i])
# Scale x-axis to Hz.
x = w * 44100 / (2 * np.pi)
# Plot amplitude response on the dB scale.
ax1.plot(x, 20 * np.log10(abs(h)), color='c', alpha=1.0 - i)
# Plot phase response in radians.
ax2.plot(x, np.unwrap(np.angle(h)), color='c', alpha=1.0 - i)
ax1.set_title('Amplitude Response (dB)')
ax2.set_title('Phase Response (radians)')
ax2.set_yticks(np.linspace(-1.0, 0.0, 5) * np.pi)
ax2.set_yticklabels([r'$-\pi$', r'$-\frac{3\pi}{4}$', r'$-\frac{\pi}{2}$',
r'$-\frac{\pi}{4}$', r'$0$'])
ax1.axis('tight')
ax2.axis('tight')
ax1.grid()
ax2.grid()
plt.show()
|
Add exploration of linear interp filter freq response
|
Add exploration of linear interp filter freq response
|
Python
|
mit
|
nick-thompson/dsp,nick-thompson/wavetable
|
Add exploration of linear interp filter freq response
|
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
f, (ax1, ax2) = plt.subplots(2, sharex=True)
for i in np.linspace(0.0, 1.0, 11):
# A linear interpolation filter with a factor of `i`.
w, h = signal.freqz([(1.0 - i), i])
# Scale x-axis to Hz.
x = w * 44100 / (2 * np.pi)
# Plot amplitude response on the dB scale.
ax1.plot(x, 20 * np.log10(abs(h)), color='c', alpha=1.0 - i)
# Plot phase response in radians.
ax2.plot(x, np.unwrap(np.angle(h)), color='c', alpha=1.0 - i)
ax1.set_title('Amplitude Response (dB)')
ax2.set_title('Phase Response (radians)')
ax2.set_yticks(np.linspace(-1.0, 0.0, 5) * np.pi)
ax2.set_yticklabels([r'$-\pi$', r'$-\frac{3\pi}{4}$', r'$-\frac{\pi}{2}$',
r'$-\frac{\pi}{4}$', r'$0$'])
ax1.axis('tight')
ax2.axis('tight')
ax1.grid()
ax2.grid()
plt.show()
|
<commit_before><commit_msg>Add exploration of linear interp filter freq response<commit_after>
|
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
f, (ax1, ax2) = plt.subplots(2, sharex=True)
for i in np.linspace(0.0, 1.0, 11):
# A linear interpolation filter with a factor of `i`.
w, h = signal.freqz([(1.0 - i), i])
# Scale x-axis to Hz.
x = w * 44100 / (2 * np.pi)
# Plot amplitude response on the dB scale.
ax1.plot(x, 20 * np.log10(abs(h)), color='c', alpha=1.0 - i)
# Plot phase response in radians.
ax2.plot(x, np.unwrap(np.angle(h)), color='c', alpha=1.0 - i)
ax1.set_title('Amplitude Response (dB)')
ax2.set_title('Phase Response (radians)')
ax2.set_yticks(np.linspace(-1.0, 0.0, 5) * np.pi)
ax2.set_yticklabels([r'$-\pi$', r'$-\frac{3\pi}{4}$', r'$-\frac{\pi}{2}$',
r'$-\frac{\pi}{4}$', r'$0$'])
ax1.axis('tight')
ax2.axis('tight')
ax1.grid()
ax2.grid()
plt.show()
|
Add exploration of linear interp filter freq responseimport matplotlib.pyplot as plt
import numpy as np
from scipy import signal
f, (ax1, ax2) = plt.subplots(2, sharex=True)
for i in np.linspace(0.0, 1.0, 11):
# A linear interpolation filter with a factor of `i`.
w, h = signal.freqz([(1.0 - i), i])
# Scale x-axis to Hz.
x = w * 44100 / (2 * np.pi)
# Plot amplitude response on the dB scale.
ax1.plot(x, 20 * np.log10(abs(h)), color='c', alpha=1.0 - i)
# Plot phase response in radians.
ax2.plot(x, np.unwrap(np.angle(h)), color='c', alpha=1.0 - i)
ax1.set_title('Amplitude Response (dB)')
ax2.set_title('Phase Response (radians)')
ax2.set_yticks(np.linspace(-1.0, 0.0, 5) * np.pi)
ax2.set_yticklabels([r'$-\pi$', r'$-\frac{3\pi}{4}$', r'$-\frac{\pi}{2}$',
r'$-\frac{\pi}{4}$', r'$0$'])
ax1.axis('tight')
ax2.axis('tight')
ax1.grid()
ax2.grid()
plt.show()
|
<commit_before><commit_msg>Add exploration of linear interp filter freq response<commit_after>import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
f, (ax1, ax2) = plt.subplots(2, sharex=True)
for i in np.linspace(0.0, 1.0, 11):
# A linear interpolation filter with a factor of `i`.
w, h = signal.freqz([(1.0 - i), i])
# Scale x-axis to Hz.
x = w * 44100 / (2 * np.pi)
# Plot amplitude response on the dB scale.
ax1.plot(x, 20 * np.log10(abs(h)), color='c', alpha=1.0 - i)
# Plot phase response in radians.
ax2.plot(x, np.unwrap(np.angle(h)), color='c', alpha=1.0 - i)
ax1.set_title('Amplitude Response (dB)')
ax2.set_title('Phase Response (radians)')
ax2.set_yticks(np.linspace(-1.0, 0.0, 5) * np.pi)
ax2.set_yticklabels([r'$-\pi$', r'$-\frac{3\pi}{4}$', r'$-\frac{\pi}{2}$',
r'$-\frac{\pi}{4}$', r'$0$'])
ax1.axis('tight')
ax2.axis('tight')
ax1.grid()
ax2.grid()
plt.show()
|
|
4ba0e32ca02567204cf8d219c212f6f14e36ad9c
|
src/methods.py
|
src/methods.py
|
"""
Integration methods
"""
import numpy as np
import matplotlib.pyplot as plt
def euler(at, vt, xt, dt):
vtpdt = vt + at * dt
xtpdt = xt + vt * dt
return vtpdt, xtpdt
def test_euler():
# test with a spring force F = -kx
x0 = 0
v0 = 1
k = 100
dt = .01
t = np.arange(0, 5, dt)
x = [x0]
v = [v0]
for tt in t[1:]:
at = -k * x[-1]
vt, xt = euler(at, v[-1], x[-1], dt)
x.append(xt)
v.append(vt)
assert len(t) == len(x), "{}, {}".format(len(t), len(x))
plt.figure()
plt.title(r'Euler integration on a spring with $x_0$={}, $v_0$={}'
.format(x0, v0))
plt.plot(t, x)
plt.show()
if __name__ == '__main__':
test_euler()
|
Create Euler method, test it on a spring
|
Create Euler method, test it on a spring
|
Python
|
mit
|
mancaf/planetarium
|
Create Euler method, test it on a spring
|
"""
Integration methods
"""
import numpy as np
import matplotlib.pyplot as plt
def euler(at, vt, xt, dt):
vtpdt = vt + at * dt
xtpdt = xt + vt * dt
return vtpdt, xtpdt
def test_euler():
# test with a spring force F = -kx
x0 = 0
v0 = 1
k = 100
dt = .01
t = np.arange(0, 5, dt)
x = [x0]
v = [v0]
for tt in t[1:]:
at = -k * x[-1]
vt, xt = euler(at, v[-1], x[-1], dt)
x.append(xt)
v.append(vt)
assert len(t) == len(x), "{}, {}".format(len(t), len(x))
plt.figure()
plt.title(r'Euler integration on a spring with $x_0$={}, $v_0$={}'
.format(x0, v0))
plt.plot(t, x)
plt.show()
if __name__ == '__main__':
test_euler()
|
<commit_before><commit_msg>Create Euler method, test it on a spring<commit_after>
|
"""
Integration methods
"""
import numpy as np
import matplotlib.pyplot as plt
def euler(at, vt, xt, dt):
vtpdt = vt + at * dt
xtpdt = xt + vt * dt
return vtpdt, xtpdt
def test_euler():
# test with a spring force F = -kx
x0 = 0
v0 = 1
k = 100
dt = .01
t = np.arange(0, 5, dt)
x = [x0]
v = [v0]
for tt in t[1:]:
at = -k * x[-1]
vt, xt = euler(at, v[-1], x[-1], dt)
x.append(xt)
v.append(vt)
assert len(t) == len(x), "{}, {}".format(len(t), len(x))
plt.figure()
plt.title(r'Euler integration on a spring with $x_0$={}, $v_0$={}'
.format(x0, v0))
plt.plot(t, x)
plt.show()
if __name__ == '__main__':
test_euler()
|
Create Euler method, test it on a spring"""
Integration methods
"""
import numpy as np
import matplotlib.pyplot as plt
def euler(at, vt, xt, dt):
vtpdt = vt + at * dt
xtpdt = xt + vt * dt
return vtpdt, xtpdt
def test_euler():
# test with a spring force F = -kx
x0 = 0
v0 = 1
k = 100
dt = .01
t = np.arange(0, 5, dt)
x = [x0]
v = [v0]
for tt in t[1:]:
at = -k * x[-1]
vt, xt = euler(at, v[-1], x[-1], dt)
x.append(xt)
v.append(vt)
assert len(t) == len(x), "{}, {}".format(len(t), len(x))
plt.figure()
plt.title(r'Euler integration on a spring with $x_0$={}, $v_0$={}'
.format(x0, v0))
plt.plot(t, x)
plt.show()
if __name__ == '__main__':
test_euler()
|
<commit_before><commit_msg>Create Euler method, test it on a spring<commit_after>"""
Integration methods
"""
import numpy as np
import matplotlib.pyplot as plt
def euler(at, vt, xt, dt):
vtpdt = vt + at * dt
xtpdt = xt + vt * dt
return vtpdt, xtpdt
def test_euler():
# test with a spring force F = -kx
x0 = 0
v0 = 1
k = 100
dt = .01
t = np.arange(0, 5, dt)
x = [x0]
v = [v0]
for tt in t[1:]:
at = -k * x[-1]
vt, xt = euler(at, v[-1], x[-1], dt)
x.append(xt)
v.append(vt)
assert len(t) == len(x), "{}, {}".format(len(t), len(x))
plt.figure()
plt.title(r'Euler integration on a spring with $x_0$={}, $v_0$={}'
.format(x0, v0))
plt.plot(t, x)
plt.show()
if __name__ == '__main__':
test_euler()
|
|
cb28b07062c817d76f40bb5956316dabae907eb4
|
remove_duplicates_from_sorted_array_ii/solution.py
|
remove_duplicates_from_sorted_array_ii/solution.py
|
class Solution:
# @param A a list of integers
# @return an integer
def removeDuplicates(self, A):
if not A:
return 0
if len(A) == 1:
return 1
j = 0 # Position of last processed non-duplicate
n = len(A)
        twice = False # Whether last processed non-duplicate appeared twice
for i in range(1, n):
# Duplicate is found
if A[i] == A[j] and not twice:
twice = True
j += 1
A[j] = A[i]
elif A[i] != A[j]:
j += 1
A[j] = A[i]
twice = False
return j + 1
|
Remove Duplicates from Sorted Array II
|
Remove Duplicates from Sorted Array II
|
Python
|
bsd-2-clause
|
quietcoolwu/leetcode-python,huanqi/leetcode-python,JiaminXuan/leetcode-python,shichao-an/leetcode-python,jamesblunt/leetcode-python,ibadguy/leetcode-python,jamesblunt/leetcode-python,shichao-an/leetcode-python,ibadguy/leetcode-python,beni55/leetcode-python,beni55/leetcode-python,huanqi/leetcode-python,quietcoolwu/leetcode-python,JiaminXuan/leetcode-python
|
Remove Duplicates from Sorted Array II
|
class Solution:
# @param A a list of integers
# @return an integer
def removeDuplicates(self, A):
if not A:
return 0
if len(A) == 1:
return 1
j = 0 # Position of last processed non-duplicate
n = len(A)
        twice = False # Whether last processed non-duplicate appeared twice
for i in range(1, n):
# Duplicate is found
if A[i] == A[j] and not twice:
twice = True
j += 1
A[j] = A[i]
elif A[i] != A[j]:
j += 1
A[j] = A[i]
twice = False
return j + 1
|
<commit_before><commit_msg>Remove Duplicates from Sorted Array II<commit_after>
|
class Solution:
# @param A a list of integers
# @return an integer
def removeDuplicates(self, A):
if not A:
return 0
if len(A) == 1:
return 1
j = 0 # Position of last processed non-duplicate
n = len(A)
        twice = False # Whether last processed non-duplicate appeared twice
for i in range(1, n):
# Duplicate is found
if A[i] == A[j] and not twice:
twice = True
j += 1
A[j] = A[i]
elif A[i] != A[j]:
j += 1
A[j] = A[i]
twice = False
return j + 1
|
Remove Duplicates from Sorted Array IIclass Solution:
# @param A a list of integers
# @return an integer
def removeDuplicates(self, A):
if not A:
return 0
if len(A) == 1:
return 1
j = 0 # Position of last processed non-duplicate
n = len(A)
        twice = False # Whether last processed non-duplicate appeared twice
for i in range(1, n):
# Duplicate is found
if A[i] == A[j] and not twice:
twice = True
j += 1
A[j] = A[i]
elif A[i] != A[j]:
j += 1
A[j] = A[i]
twice = False
return j + 1
|
<commit_before><commit_msg>Remove Duplicates from Sorted Array II<commit_after>class Solution:
# @param A a list of integers
# @return an integer
def removeDuplicates(self, A):
if not A:
return 0
if len(A) == 1:
return 1
j = 0 # Position of last processed non-duplicate
n = len(A)
        twice = False # Whether last processed non-duplicate appeared twice
for i in range(1, n):
# Duplicate is found
if A[i] == A[j] and not twice:
twice = True
j += 1
A[j] = A[i]
elif A[i] != A[j]:
j += 1
A[j] = A[i]
twice = False
return j + 1
|
|
01e8d98a7de2ac07942355bd41b88ea0cee14f1e
|
bluebottle/activities/migrations/0012_auto_20191108_1317.py
|
bluebottle/activities/migrations/0012_auto_20191108_1317.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 12:17
from __future__ import unicode_literals
from django.db import migrations
def correct_activity_status(apps, schema_editor):
Activity = apps.get_model('activities', 'Activity')
Activity.objects.filter(
review_status__in=('draft', 'submitted', 'needs_work')
).update(status='in_review')
Activity.objects.filter(
review_status='closed'
).update(status='closed')
class Migration(migrations.Migration):
dependencies = [
('activities', '0011_auto_20191028_1156'),
]
operations = [
migrations.RunPython(correct_activity_status)
]
|
Add migration that fixes status for activities that are not approved
|
Add migration that fixes status for activities that are not approved
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add migration that fixes status for activities that are not approved
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 12:17
from __future__ import unicode_literals
from django.db import migrations
def correct_activity_status(apps, schema_editor):
Activity = apps.get_model('activities', 'Activity')
Activity.objects.filter(
review_status__in=('draft', 'submitted', 'needs_work')
).update(status='in_review')
Activity.objects.filter(
review_status='closed'
).update(status='closed')
class Migration(migrations.Migration):
dependencies = [
('activities', '0011_auto_20191028_1156'),
]
operations = [
migrations.RunPython(correct_activity_status)
]
|
<commit_before><commit_msg>Add migration that fixes status for activities that are not approved<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 12:17
from __future__ import unicode_literals
from django.db import migrations
def correct_activity_status(apps, schema_editor):
Activity = apps.get_model('activities', 'Activity')
Activity.objects.filter(
review_status__in=('draft', 'submitted', 'needs_work')
).update(status='in_review')
Activity.objects.filter(
review_status='closed'
).update(status='closed')
class Migration(migrations.Migration):
dependencies = [
('activities', '0011_auto_20191028_1156'),
]
operations = [
migrations.RunPython(correct_activity_status)
]
|
Add migration that fixes status for activities that are not approved# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 12:17
from __future__ import unicode_literals
from django.db import migrations
def correct_activity_status(apps, schema_editor):
Activity = apps.get_model('activities', 'Activity')
Activity.objects.filter(
review_status__in=('draft', 'submitted', 'needs_work')
).update(status='in_review')
Activity.objects.filter(
review_status='closed'
).update(status='closed')
class Migration(migrations.Migration):
dependencies = [
('activities', '0011_auto_20191028_1156'),
]
operations = [
migrations.RunPython(correct_activity_status)
]
|
<commit_before><commit_msg>Add migration that fixes status for activities that are not approved<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-08 12:17
from __future__ import unicode_literals
from django.db import migrations
def correct_activity_status(apps, schema_editor):
Activity = apps.get_model('activities', 'Activity')
Activity.objects.filter(
review_status__in=('draft', 'submitted', 'needs_work')
).update(status='in_review')
Activity.objects.filter(
review_status='closed'
).update(status='closed')
class Migration(migrations.Migration):
dependencies = [
('activities', '0011_auto_20191028_1156'),
]
operations = [
migrations.RunPython(correct_activity_status)
]
|
|
e3272905eeb91d26e9603f682b4d485a1176d06a
|
14B-088/HI/HI_output_fits.py
|
14B-088/HI/HI_output_fits.py
|
# Convert the CASA cube into FITS
import os
from spectral_cube import SpectralCube
import astropy.units as u
from casa_tools import myexportfits
path = "/srv/astro/erickoch/M33/"
myexportfits(imagename=os.path.join(path, "M33_14B-088_HI.clean.image"),
fitsimage=os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
velocity=True, dropstokes=True, history=False)
cube = SpectralCube.read(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
mode='update')
converted_cube = cube.to(u.K, equivalencies=cube.beam.jtok_equiv(1.42040575177*u.GHz))
converted_cube.write(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
overwrite=True)
|
Convert to FITS and K
|
Convert to FITS and K
|
Python
|
mit
|
e-koch/VLA_Lband,e-koch/VLA_Lband
|
Convert to FITS and K
|
# Convert the CASA cube into FITS
import os
from spectral_cube import SpectralCube
import astropy.units as u
from casa_tools import myexportfits
path = "/srv/astro/erickoch/M33/"
myexportfits(imagename=os.path.join(path, "M33_14B-088_HI.clean.image"),
fitsimage=os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
velocity=True, dropstokes=True, history=False)
cube = SpectralCube.read(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
mode='update')
converted_cube = cube.to(u.K, equivalencies=cube.beam.jtok_equiv(1.42040575177*u.GHz))
converted_cube.write(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
overwrite=True)
|
<commit_before><commit_msg>Convert to FITS and K<commit_after>
|
# Convert the CASA cube into FITS
import os
from spectral_cube import SpectralCube
import astropy.units as u
from casa_tools import myexportfits
path = "/srv/astro/erickoch/M33/"
myexportfits(imagename=os.path.join(path, "M33_14B-088_HI.clean.image"),
fitsimage=os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
velocity=True, dropstokes=True, history=False)
cube = SpectralCube.read(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
mode='update')
converted_cube = cube.to(u.K, equivalencies=cube.beam.jtok_equiv(1.42040575177*u.GHz))
converted_cube.write(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
overwrite=True)
|
Convert to FITS and K
# Convert the CASA cube into FITS
import os
from spectral_cube import SpectralCube
import astropy.units as u
from casa_tools import myexportfits
path = "/srv/astro/erickoch/M33/"
myexportfits(imagename=os.path.join(path, "M33_14B-088_HI.clean.image"),
fitsimage=os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
velocity=True, dropstokes=True, history=False)
cube = SpectralCube.read(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
mode='update')
converted_cube = cube.to(u.K, equivalencies=cube.beam.jtok_equiv(1.42040575177*u.GHz))
converted_cube.write(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
overwrite=True)
|
<commit_before><commit_msg>Convert to FITS and K<commit_after>
# Convert the CASA cube into FITS
import os
from spectral_cube import SpectralCube
import astropy.units as u
from casa_tools import myexportfits
path = "/srv/astro/erickoch/M33/"
myexportfits(imagename=os.path.join(path, "M33_14B-088_HI.clean.image"),
fitsimage=os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
velocity=True, dropstokes=True, history=False)
cube = SpectralCube.read(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
mode='update')
converted_cube = cube.to(u.K, equivalencies=cube.beam.jtok_equiv(1.42040575177*u.GHz))
converted_cube.write(os.path.join(path, "M33_14B-088_HI.clean.image.fits"),
overwrite=True)
|
|
5aa8ce2b603a200b4d5ddf59b95f916645051af7
|
migrations/20141130-eliminate-duplicate-tags.py
|
migrations/20141130-eliminate-duplicate-tags.py
|
"""
"""
import os
import json
from sqlalchemy import (create_engine, Table, Column, String, Integer,
Float, Text, MetaData, select, ForeignKey,
bindparam, delete, and_)
from config import Configuration
engine = create_engine(Configuration.SQLALCHEMY_DATABASE_URI, echo=True)
metadata = MetaData()
tags = Table(
'tag', metadata,
Column('id', Integer, primary_key=True),
Column('name', String),
)
posts = Table(
'post', metadata,
Column('id', Integer, primary_key=True),
)
posts_to_tags = Table(
'posts_to_tags', metadata,
Column('tag_id', Integer, ForeignKey('tag.id')),
Column('post_id', Integer, ForeignKey('post.id')),
)
def eliminate_duplicates(conn):
tag_map = {}
update_batch = []
delete_batch = []
for row in conn.execute(
select([posts, tags]).select_from(
posts.join(posts_to_tags).join(tags)
).order_by(tags.c.id)):
post_id = row[0]
tag_id = row[1]
tag_name = row[2]
# possible duplicate
if tag_name in tag_map:
preexisting_tag_id = tag_map.get(tag_name)
if preexisting_tag_id != tag_id:
update_batch.append({
'the_post_id': post_id,
'old_tag_id': tag_id,
'new_tag_id': preexisting_tag_id,
})
delete_batch.append({
'the_tag_id': tag_id,
})
else:
tag_map[tag_name] = tag_id
print('update batch', update_batch)
if update_batch:
update_stmt = posts_to_tags.update().where(
and_(
posts_to_tags.c.post_id == bindparam('the_post_id'),
posts_to_tags.c.tag_id == bindparam('old_tag_id')
)
).values(tag_id=bindparam('new_tag_id'))
# print(update_stmt)
# print(update_batch)
conn.execute(update_stmt, update_batch)
print('delete batch', delete_batch)
if delete_batch:
delete_stmt = tags.delete().where(tags.c.id == bindparam('the_tag_id'))
# print(delete_stmt)
conn.execute(delete_stmt, delete_batch)
with engine.begin() as conn:
eliminate_duplicates(conn)
|
Add migrations to compile duplicate tags
|
Add migrations to compile duplicate tags
|
Python
|
bsd-2-clause
|
Lancey6/redwind,Lancey6/redwind,thedod/redwind,Lancey6/redwind,thedod/redwind
|
Add migrations to compile duplicate tags
|
"""
"""
import os
import json
from sqlalchemy import (create_engine, Table, Column, String, Integer,
Float, Text, MetaData, select, ForeignKey,
bindparam, delete, and_)
from config import Configuration
engine = create_engine(Configuration.SQLALCHEMY_DATABASE_URI, echo=True)
metadata = MetaData()
tags = Table(
'tag', metadata,
Column('id', Integer, primary_key=True),
Column('name', String),
)
posts = Table(
'post', metadata,
Column('id', Integer, primary_key=True),
)
posts_to_tags = Table(
'posts_to_tags', metadata,
Column('tag_id', Integer, ForeignKey('tag.id')),
Column('post_id', Integer, ForeignKey('post.id')),
)
def eliminate_duplicates(conn):
tag_map = {}
update_batch = []
delete_batch = []
for row in conn.execute(
select([posts, tags]).select_from(
posts.join(posts_to_tags).join(tags)
).order_by(tags.c.id)):
post_id = row[0]
tag_id = row[1]
tag_name = row[2]
# possible duplicate
if tag_name in tag_map:
preexisting_tag_id = tag_map.get(tag_name)
if preexisting_tag_id != tag_id:
update_batch.append({
'the_post_id': post_id,
'old_tag_id': tag_id,
'new_tag_id': preexisting_tag_id,
})
delete_batch.append({
'the_tag_id': tag_id,
})
else:
tag_map[tag_name] = tag_id
print('update batch', update_batch)
if update_batch:
update_stmt = posts_to_tags.update().where(
and_(
posts_to_tags.c.post_id == bindparam('the_post_id'),
posts_to_tags.c.tag_id == bindparam('old_tag_id')
)
).values(tag_id=bindparam('new_tag_id'))
# print(update_stmt)
# print(update_batch)
conn.execute(update_stmt, update_batch)
print('delete batch', delete_batch)
if delete_batch:
delete_stmt = tags.delete().where(tags.c.id == bindparam('the_tag_id'))
# print(delete_stmt)
conn.execute(delete_stmt, delete_batch)
with engine.begin() as conn:
eliminate_duplicates(conn)
|
<commit_before><commit_msg>Add migrations to compile duplicate tags<commit_after>
|
"""
"""
import os
import json
from sqlalchemy import (create_engine, Table, Column, String, Integer,
Float, Text, MetaData, select, ForeignKey,
bindparam, delete, and_)
from config import Configuration
engine = create_engine(Configuration.SQLALCHEMY_DATABASE_URI, echo=True)
metadata = MetaData()
tags = Table(
'tag', metadata,
Column('id', Integer, primary_key=True),
Column('name', String),
)
posts = Table(
'post', metadata,
Column('id', Integer, primary_key=True),
)
posts_to_tags = Table(
'posts_to_tags', metadata,
Column('tag_id', Integer, ForeignKey('tag.id')),
Column('post_id', Integer, ForeignKey('post.id')),
)
def eliminate_duplicates(conn):
tag_map = {}
update_batch = []
delete_batch = []
for row in conn.execute(
select([posts, tags]).select_from(
posts.join(posts_to_tags).join(tags)
).order_by(tags.c.id)):
post_id = row[0]
tag_id = row[1]
tag_name = row[2]
# possible duplicate
if tag_name in tag_map:
preexisting_tag_id = tag_map.get(tag_name)
if preexisting_tag_id != tag_id:
update_batch.append({
'the_post_id': post_id,
'old_tag_id': tag_id,
'new_tag_id': preexisting_tag_id,
})
delete_batch.append({
'the_tag_id': tag_id,
})
else:
tag_map[tag_name] = tag_id
print('update batch', update_batch)
if update_batch:
update_stmt = posts_to_tags.update().where(
and_(
posts_to_tags.c.post_id == bindparam('the_post_id'),
posts_to_tags.c.tag_id == bindparam('old_tag_id')
)
).values(tag_id=bindparam('new_tag_id'))
# print(update_stmt)
# print(update_batch)
conn.execute(update_stmt, update_batch)
print('delete batch', delete_batch)
if delete_batch:
delete_stmt = tags.delete().where(tags.c.id == bindparam('the_tag_id'))
# print(delete_stmt)
conn.execute(delete_stmt, delete_batch)
with engine.begin() as conn:
eliminate_duplicates(conn)
|
Add migrations to compile duplicate tags"""
"""
import os
import json
from sqlalchemy import (create_engine, Table, Column, String, Integer,
Float, Text, MetaData, select, ForeignKey,
bindparam, delete, and_)
from config import Configuration
engine = create_engine(Configuration.SQLALCHEMY_DATABASE_URI, echo=True)
metadata = MetaData()
tags = Table(
'tag', metadata,
Column('id', Integer, primary_key=True),
Column('name', String),
)
posts = Table(
'post', metadata,
Column('id', Integer, primary_key=True),
)
posts_to_tags = Table(
'posts_to_tags', metadata,
Column('tag_id', Integer, ForeignKey('tag.id')),
Column('post_id', Integer, ForeignKey('post.id')),
)
def eliminate_duplicates(conn):
tag_map = {}
update_batch = []
delete_batch = []
for row in conn.execute(
select([posts, tags]).select_from(
posts.join(posts_to_tags).join(tags)
).order_by(tags.c.id)):
post_id = row[0]
tag_id = row[1]
tag_name = row[2]
# possible duplicate
if tag_name in tag_map:
preexisting_tag_id = tag_map.get(tag_name)
if preexisting_tag_id != tag_id:
update_batch.append({
'the_post_id': post_id,
'old_tag_id': tag_id,
'new_tag_id': preexisting_tag_id,
})
delete_batch.append({
'the_tag_id': tag_id,
})
else:
tag_map[tag_name] = tag_id
print('update batch', update_batch)
if update_batch:
update_stmt = posts_to_tags.update().where(
and_(
posts_to_tags.c.post_id == bindparam('the_post_id'),
posts_to_tags.c.tag_id == bindparam('old_tag_id')
)
).values(tag_id=bindparam('new_tag_id'))
# print(update_stmt)
# print(update_batch)
conn.execute(update_stmt, update_batch)
print('delete batch', delete_batch)
if delete_batch:
delete_stmt = tags.delete().where(tags.c.id == bindparam('the_tag_id'))
# print(delete_stmt)
conn.execute(delete_stmt, delete_batch)
with engine.begin() as conn:
eliminate_duplicates(conn)
|
<commit_before><commit_msg>Add migrations to compile duplicate tags<commit_after>"""
"""
import os
import json
from sqlalchemy import (create_engine, Table, Column, String, Integer,
Float, Text, MetaData, select, ForeignKey,
bindparam, delete, and_)
from config import Configuration
engine = create_engine(Configuration.SQLALCHEMY_DATABASE_URI, echo=True)
metadata = MetaData()
tags = Table(
'tag', metadata,
Column('id', Integer, primary_key=True),
Column('name', String),
)
posts = Table(
'post', metadata,
Column('id', Integer, primary_key=True),
)
posts_to_tags = Table(
'posts_to_tags', metadata,
Column('tag_id', Integer, ForeignKey('tag.id')),
Column('post_id', Integer, ForeignKey('post.id')),
)
def eliminate_duplicates(conn):
tag_map = {}
update_batch = []
delete_batch = []
for row in conn.execute(
select([posts, tags]).select_from(
posts.join(posts_to_tags).join(tags)
).order_by(tags.c.id)):
post_id = row[0]
tag_id = row[1]
tag_name = row[2]
# possible duplicate
if tag_name in tag_map:
preexisting_tag_id = tag_map.get(tag_name)
if preexisting_tag_id != tag_id:
update_batch.append({
'the_post_id': post_id,
'old_tag_id': tag_id,
'new_tag_id': preexisting_tag_id,
})
delete_batch.append({
'the_tag_id': tag_id,
})
else:
tag_map[tag_name] = tag_id
print('update batch', update_batch)
if update_batch:
update_stmt = posts_to_tags.update().where(
and_(
posts_to_tags.c.post_id == bindparam('the_post_id'),
posts_to_tags.c.tag_id == bindparam('old_tag_id')
)
).values(tag_id=bindparam('new_tag_id'))
# print(update_stmt)
# print(update_batch)
conn.execute(update_stmt, update_batch)
print('delete batch', delete_batch)
if delete_batch:
delete_stmt = tags.delete().where(tags.c.id == bindparam('the_tag_id'))
# print(delete_stmt)
conn.execute(delete_stmt, delete_batch)
with engine.begin() as conn:
eliminate_duplicates(conn)
|
|
d411a3cdc16b68647059c007cff3091ecf3ed9dc
|
gaia_tools/query/__init__.py
|
gaia_tools/query/__init__.py
|
# gaia_tools.query: some helper functions for querying the Gaia database
import numpy
from astropy.table import Table
from astroquery.gaia import Gaia
import psycopg2
def query(sql_query,local=False,dbname='catalogs',user='postgres'):
"""
NAME:
query
PURPOSE:
perform a query, either on a local server or on the Gaia archive
INPUT:
sql_query - the text of the query
OUTPUT:
result
HISTORY:
2018-05-02 - Written - Bovy (UofT)
"""
if local and 'gaiadr2.' in sql_query:
sql_query= sql_query.replace('gaiadr2.','gdr2_')
elif not local and 'gdr2_' in sql_query:
sql_query= sql_query.replace('gdr2_','gaiadr2.')
if local:
conn= psycopg2.connect("dbname={} user={}".format(dbname,user))
cur= conn.cursor()
cur.execute(sql_query)
out= cur.fetchall()
names= [desc[0] for desc in cur.description]
cur.close()
conn.close()
out= Table(numpy.array(out),names=names)
else:
job= Gaia.launch_job_async(sql_query)
out= job.get_results()
return out
|
Add function to run a local or remote query
|
Add function to run a local or remote query
|
Python
|
mit
|
jobovy/gaia_tools
|
Add function to run a local or remote query
|
# gaia_tools.query: some helper functions for querying the Gaia database
import numpy
from astropy.table import Table
from astroquery.gaia import Gaia
import psycopg2
def query(sql_query,local=False,dbname='catalogs',user='postgres'):
"""
NAME:
query
PURPOSE:
perform a query, either on a local server or on the Gaia archive
INPUT:
sql_query - the text of the query
OUTPUT:
result
HISTORY:
2018-05-02 - Written - Bovy (UofT)
"""
if local and 'gaiadr2.' in sql_query:
sql_query= sql_query.replace('gaiadr2.','gdr2_')
elif not local and 'gdr2_' in sql_query:
sql_query= sql_query.replace('gdr2_','gaiadr2.')
if local:
conn= psycopg2.connect("dbname={} user={}".format(dbname,user))
cur= conn.cursor()
cur.execute(sql_query)
out= cur.fetchall()
names= [desc[0] for desc in cur.description]
cur.close()
conn.close()
out= Table(numpy.array(out),names=names)
else:
job= Gaia.launch_job_async(sql_query)
out= job.get_results()
return out
|
<commit_before><commit_msg>Add function to run a local or remote query<commit_after>
|
# gaia_tools.query: some helper functions for querying the Gaia database
import numpy
from astropy.table import Table
from astroquery.gaia import Gaia
import psycopg2
def query(sql_query,local=False,dbname='catalogs',user='postgres'):
"""
NAME:
query
PURPOSE:
perform a query, either on a local server or on the Gaia archive
INPUT:
sql_query - the text of the query
OUTPUT:
result
HISTORY:
2018-05-02 - Written - Bovy (UofT)
"""
if local and 'gaiadr2.' in sql_query:
sql_query= sql_query.replace('gaiadr2.','gdr2_')
elif not local and 'gdr2_' in sql_query:
sql_query= sql_query.replace('gdr2_','gaiadr2.')
if local:
conn= psycopg2.connect("dbname={} user={}".format(dbname,user))
cur= conn.cursor()
cur.execute(sql_query)
out= cur.fetchall()
names= [desc[0] for desc in cur.description]
cur.close()
conn.close()
out= Table(numpy.array(out),names=names)
else:
job= Gaia.launch_job_async(sql_query)
out= job.get_results()
return out
|
Add function to run a local or remote query# gaia_tools.query: some helper functions for querying the Gaia database
import numpy
from astropy.table import Table
from astroquery.gaia import Gaia
import psycopg2
def query(sql_query,local=False,dbname='catalogs',user='postgres'):
"""
NAME:
query
PURPOSE:
perform a query, either on a local server or on the Gaia archive
INPUT:
sql_query - the text of the query
OUTPUT:
result
HISTORY:
2018-05-02 - Written - Bovy (UofT)
"""
if local and 'gaiadr2.' in sql_query:
sql_query= sql_query.replace('gaiadr2.','gdr2_')
elif not local and 'gdr2_' in sql_query:
sql_query= sql_query.replace('gdr2_','gaiadr2.')
if local:
conn= psycopg2.connect("dbname={} user={}".format(dbname,user))
cur= conn.cursor()
cur.execute(sql_query)
out= cur.fetchall()
names= [desc[0] for desc in cur.description]
cur.close()
conn.close()
out= Table(numpy.array(out),names=names)
else:
job= Gaia.launch_job_async(sql_query)
out= job.get_results()
return out
|
<commit_before><commit_msg>Add function to run a local or remote query<commit_after># gaia_tools.query: some helper functions for querying the Gaia database
import numpy
from astropy.table import Table
from astroquery.gaia import Gaia
import psycopg2
def query(sql_query,local=False,dbname='catalogs',user='postgres'):
"""
NAME:
query
PURPOSE:
perform a query, either on a local server or on the Gaia archive
INPUT:
sql_query - the text of the query
OUTPUT:
result
HISTORY:
2018-05-02 - Written - Bovy (UofT)
"""
if local and 'gaiadr2.' in sql_query:
sql_query= sql_query.replace('gaiadr2.','gdr2_')
elif not local and 'gdr2_' in sql_query:
sql_query= sql_query.replace('gdr2_','gaiadr2.')
if local:
conn= psycopg2.connect("dbname={} user={}".format(dbname,user))
cur= conn.cursor()
cur.execute(sql_query)
out= cur.fetchall()
names= [desc[0] for desc in cur.description]
cur.close()
conn.close()
out= Table(numpy.array(out),names=names)
else:
job= Gaia.launch_job_async(sql_query)
out= job.get_results()
return out
|
|
d13e9ab0875844bddc1f455935a48091544c6937
|
apps/people/migrations/0004_people_per_page.py
|
apps/people/migrations/0004_people_per_page.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0003_people_standfirst'),
]
operations = [
migrations.AddField(
model_name='people',
name='per_page',
field=models.IntegerField(default=5, null=True, verbose_name=b'people per page', blank=True),
preserve_default=True,
),
]
|
Add missing migration file for `per_page` field.
|
Add missing migration file for `per_page` field.
|
Python
|
mit
|
onespacemedia/cms-people,onespacemedia/cms-people
|
Add missing migration file for `per_page` field.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0003_people_standfirst'),
]
operations = [
migrations.AddField(
model_name='people',
name='per_page',
field=models.IntegerField(default=5, null=True, verbose_name=b'people per page', blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration file for `per_page` field.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0003_people_standfirst'),
]
operations = [
migrations.AddField(
model_name='people',
name='per_page',
field=models.IntegerField(default=5, null=True, verbose_name=b'people per page', blank=True),
preserve_default=True,
),
]
|
Add missing migration file for `per_page` field.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0003_people_standfirst'),
]
operations = [
migrations.AddField(
model_name='people',
name='per_page',
field=models.IntegerField(default=5, null=True, verbose_name=b'people per page', blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add missing migration file for `per_page` field.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('people', '0003_people_standfirst'),
]
operations = [
migrations.AddField(
model_name='people',
name='per_page',
field=models.IntegerField(default=5, null=True, verbose_name=b'people per page', blank=True),
preserve_default=True,
),
]
|
|
ade82c81d43473eaef4fb3db668353f76c0daf91
|
messaging/synonyms.py
|
messaging/synonyms.py
|
import os
from threading import RLock
from weavelib.exceptions import ObjectAlreadyExists
class SynonymRegistry(object):
def __init__(self):
self.synonym_lock = RLock()
self.synonyms = {}
def register(self, synonym, target):
synonym = os.path.join("/synonyms", synonym.lstrip('/'))
with self.synonym_lock:
if synonym in self.synonyms:
                raise ObjectAlreadyExists(synonym)
self.synonyms[synonym] = target
return synonym
def translate(self, synonym):
with self.synonym_lock:
return self.synonyms.get(synonym, synonym)
|
Add a simple synonym-map class.
|
Add a simple synonym-map class.
|
Python
|
mit
|
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
|
Add a simple synonym-map class.
|
import os
from threading import RLock
from weavelib.exceptions import ObjectAlreadyExists
class SynonymRegistry(object):
def __init__(self):
self.synonym_lock = RLock()
self.synonyms = {}
def register(self, synonym, target):
synonym = os.path.join("/synonyms", synonym.lstrip('/'))
with self.synonym_lock:
if synonym in self.synonyms:
                raise ObjectAlreadyExists(synonym)
self.synonyms[synonym] = target
return synonym
def translate(self, synonym):
with self.synonym_lock:
return self.synonyms.get(synonym, synonym)
|
<commit_before><commit_msg>Add a simple synonym-map class.<commit_after>
|
import os
from threading import RLock
from weavelib.exceptions import ObjectAlreadyExists
class SynonymRegistry(object):
def __init__(self):
self.synonym_lock = RLock()
self.synonyms = {}
def register(self, synonym, target):
synonym = os.path.join("/synonyms", synonym.lstrip('/'))
with self.synonym_lock:
if synonym in self.synonyms:
                raise ObjectAlreadyExists(synonym)
self.synonyms[synonym] = target
return synonym
def translate(self, synonym):
with self.synonym_lock:
return self.synonyms.get(synonym, synonym)
|
Add a simple synonym-map class.import os
from threading import RLock
from weavelib.exceptions import ObjectAlreadyExists
class SynonymRegistry(object):
def __init__(self):
self.synonym_lock = RLock()
self.synonyms = {}
def register(self, synonym, target):
synonym = os.path.join("/synonyms", synonym.lstrip('/'))
with self.synonym_lock:
if synonym in self.synonyms:
                raise ObjectAlreadyExists(synonym)
self.synonyms[synonym] = target
return synonym
def translate(self, synonym):
with self.synonym_lock:
return self.synonyms.get(synonym, synonym)
|
<commit_before><commit_msg>Add a simple synonym-map class.<commit_after>import os
from threading import RLock
from weavelib.exceptions import ObjectAlreadyExists
class SynonymRegistry(object):
def __init__(self):
self.synonym_lock = RLock()
self.synonyms = {}
def register(self, synonym, target):
synonym = os.path.join("/synonyms", synonym.lstrip('/'))
with self.synonym_lock:
if synonym in self.synonyms:
                raise ObjectAlreadyExists(synonym)
self.synonyms[synonym] = target
return synonym
def translate(self, synonym):
with self.synonym_lock:
return self.synonyms.get(synonym, synonym)
|
|
4190937c741f56bb4bb8b81621a711bba03fe705
|
imhotep/shas.py
|
imhotep/shas.py
|
from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
return Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
def to_commit_info(self):
remote_repo = None
if self.has_remote_repo:
remote_repo = self.remote_repo
return CommitInfo(self.base_sha, self.head_sha, remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
|
from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
remote = None
if self.has_remote_repo:
remote = Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
return remote
def to_commit_info(self):
return CommitInfo(self.base_sha, self.head_sha, self.remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
|
Refactor remote_repo to return None if there is no remote.
|
Refactor remote_repo to return None
if there is no remote.
|
Python
|
mit
|
richtier/imhotep,justinabrahms/imhotep,justinabrahms/imhotep,Appdynamics/imhotep
|
from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
return Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
def to_commit_info(self):
remote_repo = None
if self.has_remote_repo:
remote_repo = self.remote_repo
return CommitInfo(self.base_sha, self.head_sha, remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
Refactor remote_repo to return None
if there is no remote.
|
from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
remote = None
if self.has_remote_repo:
remote = Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
return remote
def to_commit_info(self):
return CommitInfo(self.base_sha, self.head_sha, self.remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
|
<commit_before>from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
return Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
def to_commit_info(self):
remote_repo = None
if self.has_remote_repo:
remote_repo = self.remote_repo
return CommitInfo(self.base_sha, self.head_sha, remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
<commit_msg>Refactor remote_repo to return None
if there is no remote.<commit_after>
|
from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
remote = None
if self.has_remote_repo:
remote = Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
return remote
def to_commit_info(self):
return CommitInfo(self.base_sha, self.head_sha, self.remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
|
from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
return Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
def to_commit_info(self):
remote_repo = None
if self.has_remote_repo:
remote_repo = self.remote_repo
return CommitInfo(self.base_sha, self.head_sha, remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
Refactor remote_repo to return None
if there is no remote.from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
remote = None
if self.has_remote_repo:
remote = Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
return remote
def to_commit_info(self):
return CommitInfo(self.base_sha, self.head_sha, self.remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
|
<commit_before>from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
return Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
def to_commit_info(self):
remote_repo = None
if self.has_remote_repo:
remote_repo = self.remote_repo
return CommitInfo(self.base_sha, self.head_sha, remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
<commit_msg>Refactor remote_repo, to return None
if there is no remote.<commit_after>from collections import namedtuple
Remote = namedtuple('Remote', ('name', 'url'))
CommitInfo = namedtuple("CommitInfo", ('commit', 'origin', 'remote_repo'))
class PRInfo(object):
def __init__(self, json):
self.json = json
@property
def base_sha(self):
return self.json['base']['sha']
@property
def head_sha(self):
return self.json['head']['sha']
@property
def has_remote_repo(self):
return self.json['base']['repo']['owner']['login'] != \
self.json['head']['repo']['owner']['login']
@property
def remote_repo(self):
remote = None
if self.has_remote_repo:
remote = Remote(name=self.json['head']['repo']['owner']['login'],
url=self.json['head']['repo']['clone_url'])
return remote
def to_commit_info(self):
return CommitInfo(self.base_sha, self.head_sha, self.remote_repo)
def get_pr_info(requester, reponame, number):
"Returns the PullRequest as a PRInfo object"
resp = requester.get(
'https://api.github.com/repos/%s/pulls/%s' % (reponame, number))
return PRInfo(resp.json)
|
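With the refactor, callers of to_commit_info() can branch directly on the remote_repo field of the returned CommitInfo. A minimal sketch of that consumption, using the PRInfo class above; the JSON payload is a hand-built stand-in for a GitHub pull-request response, not real API output:

# Hand-built stand-in for a GitHub PR payload (assumption, not a real
# API response), shaped to exercise PRInfo.to_commit_info().
same_owner_json = {
    'base': {'sha': 'abc123', 'repo': {'owner': {'login': 'alice'}}},
    'head': {'sha': 'def456',
             'repo': {'owner': {'login': 'alice'},
                      'clone_url': 'https://github.com/alice/repo.git'}},
}

info = PRInfo(same_owner_json).to_commit_info()
if info.remote_repo is None:
    pass  # same-owner PR: fetch from origin only
else:
    pass  # cross-fork PR: add info.remote_repo.url as a git remote first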
75a8f8e7181188e174fc707875117f49c461ee97
|
open_spiel/python/examples/uniform_policy_exploitability.py
|
open_spiel/python/examples/uniform_policy_exploitability.py
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example use of computing exploitability of a uniform policy."""
from absl import app
from absl import flags
from open_spiel.python.algorithms import exploitability
from open_spiel.python.policy import UniformRandomPolicy
import pyspiel
FLAGS = flags.FLAGS
flags.DEFINE_string("game", "kuhn_poker", "Name of the game")
def main(_):
game = pyspiel.load_game(FLAGS.game)
expl = exploitability.exploitability(game, UniformRandomPolicy(game))
print("Exploitability: {}".format(expl))
if __name__ == "__main__":
app.run(main)
|
Add a simple example of computing exploitability of uniform policy.
|
Add a simple example of computing exploitability of uniform policy.
|
Python
|
apache-2.0
|
deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel
|
Add a simple example of computing exploitability of uniform policy.
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example use of computing exploitability of a uniform policy."""
from absl import app
from absl import flags
from open_spiel.python.algorithms import exploitability
from open_spiel.python.policy import UniformRandomPolicy
import pyspiel
FLAGS = flags.FLAGS
flags.DEFINE_string("game", "kuhn_poker", "Name of the game")
def main(_):
game = pyspiel.load_game(FLAGS.game)
expl = exploitability.exploitability(game, UniformRandomPolicy(game))
print("Exploitability: {}".format(expl))
if __name__ == "__main__":
app.run(main)
|
<commit_before><commit_msg>Add a simple example of computing exploitability of uniform policy.<commit_after>
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example use of computing exploitability of a uniform policy."""
from absl import app
from absl import flags
from open_spiel.python.algorithms import exploitability
from open_spiel.python.policy import UniformRandomPolicy
import pyspiel
FLAGS = flags.FLAGS
flags.DEFINE_string("game", "kuhn_poker", "Name of the game")
def main(_):
game = pyspiel.load_game(FLAGS.game)
expl = exploitability.exploitability(game, UniformRandomPolicy(game))
print("Exploitability: {}".format(expl))
if __name__ == "__main__":
app.run(main)
|
Add a simple example of computing exploitability of uniform policy.# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example use of computing exploitability of a uniform policy."""
from absl import app
from absl import flags
from open_spiel.python.algorithms import exploitability
from open_spiel.python.policy import UniformRandomPolicy
import pyspiel
FLAGS = flags.FLAGS
flags.DEFINE_string("game", "kuhn_poker", "Name of the game")
def main(_):
game = pyspiel.load_game(FLAGS.game)
expl = exploitability.exploitability(game, UniformRandomPolicy(game))
print("Exploitability: {}".format(expl))
if __name__ == "__main__":
app.run(main)
|
<commit_before><commit_msg>Add a simple example of computing exploitability of uniform policy.<commit_after># Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example use of computing exploitability of a uniform policy."""
from absl import app
from absl import flags
from open_spiel.python.algorithms import exploitability
from open_spiel.python.policy import UniformRandomPolicy
import pyspiel
FLAGS = flags.FLAGS
flags.DEFINE_string("game", "kuhn_poker", "Name of the game")
def main(_):
game = pyspiel.load_game(FLAGS.game)
expl = exploitability.exploitability(game, UniformRandomPolicy(game))
print("Exploitability: {}".format(expl))
if __name__ == "__main__":
app.run(main)
|
|
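The uniform policy is the natural baseline; a plausible follow-up (a sketch, not part of the commit) is to watch exploitability fall as CFR iterates. The CFRSolver class and the evaluate_and_update_policy / average_policy method names are assumptions to verify against the installed open_spiel version:

# Sketch: compare the uniform baseline against a CFR average policy.
from open_spiel.python.algorithms import cfr, exploitability
from open_spiel.python.policy import UniformRandomPolicy
import pyspiel

game = pyspiel.load_game("kuhn_poker")
print(exploitability.exploitability(game, UniformRandomPolicy(game)))

solver = cfr.CFRSolver(game)
for _ in range(100):  # iteration count is illustrative
    solver.evaluate_and_update_policy()
print(exploitability.exploitability(game, solver.average_policy()))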
42b5369a1608417b6503d914b90d14945514bff8
|
contrib/examples/tests/test_action_isprime.py
|
contrib/examples/tests/test_action_isprime.py
|
from st2tests.base import BaseActionTestCase
from pythonactions.isprime import PrimeCheckerAction
class PrimeCheckerActionTestCase(BaseActionTestCase):
action_cls = PrimeCheckerAction
def test_run(self):
action = self.get_action_instance()
result = action.run(value=1)
self.assertFalse(result)
result = action.run(value=3)
self.assertTrue(result)
|
Add tests for example is prime action.
|
Add tests for example is prime action.
|
Python
|
apache-2.0
|
nzlosh/st2,punalpatel/st2,dennybaa/st2,pixelrebel/st2,StackStorm/st2,emedvedev/st2,punalpatel/st2,dennybaa/st2,lakshmi-kannan/st2,emedvedev/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,punalpatel/st2,lakshmi-kannan/st2,Plexxi/st2,lakshmi-kannan/st2,peak6/st2,peak6/st2,StackStorm/st2,emedvedev/st2,Plexxi/st2,tonybaloney/st2,pixelrebel/st2,StackStorm/st2,nzlosh/st2,dennybaa/st2,tonybaloney/st2,pixelrebel/st2,peak6/st2,tonybaloney/st2,nzlosh/st2,Plexxi/st2
|
Add tests for example is prime action.
|
from st2tests.base import BaseActionTestCase
from pythonactions.isprime import PrimeCheckerAction
class PrimeCheckerActionTestCase(BaseActionTestCase):
action_cls = PrimeCheckerAction
def test_run(self):
action = self.get_action_instance()
result = action.run(value=1)
self.assertFalse(result)
result = action.run(value=3)
self.assertTrue(result)
|
<commit_before><commit_msg>Add tests for example is prime action.<commit_after>
|
from st2tests.base import BaseActionTestCase
from pythonactions.isprime import PrimeCheckerAction
class PrimeCheckerActionTestCase(BaseActionTestCase):
action_cls = PrimeCheckerAction
def test_run(self):
action = self.get_action_instance()
result = action.run(value=1)
self.assertFalse(result)
result = action.run(value=3)
self.assertTrue(result)
|
Add tests for example is prime action.from st2tests.base import BaseActionTestCase
from pythonactions.isprime import PrimeCheckerAction
class PrimeCheckerActionTestCase(BaseActionTestCase):
action_cls = PrimeCheckerAction
def test_run(self):
action = self.get_action_instance()
result = action.run(value=1)
self.assertFalse(result)
result = action.run(value=3)
self.assertTrue(result)
|
<commit_before><commit_msg>Add tests for example is prime action.<commit_after>from st2tests.base import BaseActionTestCase
from pythonactions.isprime import PrimeCheckerAction
class PrimeCheckerActionTestCase(BaseActionTestCase):
action_cls = PrimeCheckerAction
def test_run(self):
action = self.get_action_instance()
result = action.run(value=1)
self.assertFalse(result)
result = action.run(value=3)
self.assertTrue(result)
|
|
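The PrimeCheckerAction implementation itself is not part of this commit. A minimal trial-division version consistent with the assertions above (1 is not prime, 3 is) could look like the following; the Action base-class import follows the st2 convention of the time, but the whole body is an assumption:

# Hypothetical pythonactions/isprime.py matching the tests above.
from st2actions.runners.pythonrunner import Action

class PrimeCheckerAction(Action):
    def run(self, value):
        if value < 2:
            return False
        divisor = 2
        while divisor * divisor <= value:
            if value % divisor == 0:
                return False
            divisor += 1
        return True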
9ec01f7d01d77aaf7ee934725b9d7994b8157bcf
|
drftest/shop/tests/test_models.py
|
drftest/shop/tests/test_models.py
|
from django.test import TestCase
from shop.models import Order
from .factories import UserFactory, OrderFactory
class ShopModelsTestCase(TestCase):
def setUp(self):
pass
def test_order_add(self):
order = OrderFactory.create()
self.assertEquals(Order.objects.count(), 1)
|
Add initial tests for models and factories
|
Add initial tests for models and factories
|
Python
|
mit
|
andreagrandi/drf3-test,andreagrandi/drf3-test,andreagrandi/drf3-test
|
Add initial tests for models and factories
|
from django.test import TestCase
from shop.models import Order
from .factories import UserFactory, OrderFactory
class ShopModelsTestCase(TestCase):
def setUp(self):
pass
def test_order_add(self):
order = OrderFactory.create()
self.assertEquals(Order.objects.count(), 1)
|
<commit_before><commit_msg>Add initial tests for models and factories<commit_after>
|
from django.test import TestCase
from shop.models import Order
from .factories import UserFactory, OrderFactory
class ShopModelsTestCase(TestCase):
def setUp(self):
pass
def test_order_add(self):
order = OrderFactory.create()
self.assertEquals(Order.objects.count(), 1)
|
Add initial tests for models and factoriesfrom django.test import TestCase
from shop.models import Order
from .factories import UserFactory, OrderFactory
class ShopModelsTestCase(TestCase):
def setUp(self):
pass
def test_order_add(self):
order = OrderFactory.create()
self.assertEquals(Order.objects.count(), 1)
|
<commit_before><commit_msg>Add initial tests for models and factories<commit_after>from django.test import TestCase
from shop.models import Order
from .factories import UserFactory, OrderFactory
class ShopModelsTestCase(TestCase):
def setUp(self):
pass
def test_order_add(self):
order = OrderFactory.create()
self.assertEquals(Order.objects.count(), 1)
|
|
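The .factories module the test imports is not shown. A minimal factory_boy sketch that would satisfy test_order_add might look like this; the User/Order field names are assumptions about the shop models, not taken from them:

# Hypothetical shop/tests/factories.py built on factory_boy.
import factory
from django.contrib.auth.models import User
from shop.models import Order

class UserFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = User
    username = factory.Sequence(lambda n: 'user%d' % n)

class OrderFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Order
    user = factory.SubFactory(UserFactory)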
5b3218ef7d824df3aa61d7e4bceecb566ecf8ccf
|
examples/c4_ql_tabular_selfplay.py
|
examples/c4_ql_tabular_selfplay.py
|
'''
Q-learning via self-play is used to learn the state-action values, Q(s, a),
for the legal moves of a Connect 4 position.
'''
from capstone.environment import Environment
from capstone.game import Connect4
from capstone.mdp import GameMDP
from capstone.rl import QLearningSelfPlay
from capstone.util import c42pdf
board = [['X', 'O', 'X', 'O', ' ', ' ', ' '],
['X', 'O', 'X', 'O', ' ', ' ', ' '],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['X', 'O', 'X', 'O', 'X', 'O', 'X'],
['X', 'O', 'X', 'O', 'X', 'O', 'X']]
game = Connect4(board)
mdp = GameMDP(game)
env = Environment(mdp)
qlearning = QLearningSelfPlay(env, n_episodes=1000)
qlearning.learn()
c42pdf('figures/c4_ql_tabular_selfplay_current.pdf', game.board)
for move in game.legal_moves():
print('*' * 80)
value = qlearning.qf[(game, move)]
print('Move: %s' % move)
print('Value: %f' % value)
new_game = game.copy().make_move(move)
print(new_game)
filename = 'figures/c4_ql_tabular_selfplay_move_%s_value_%.4f.pdf' % (move, value)
c42pdf(filename, new_game.board)
|
Add Connect4 Q-learning tabular self-play example
|
Add Connect4 Q-learning tabular self-play example
|
Python
|
mit
|
davidrobles/mlnd-capstone-code
|
Add Connect4 Q-learning tabular self-play example
|
'''
Q-learning via self-play is used to learn the state-action values, Q(s, a),
for the legal moves of a Connect 4 position.
'''
from capstone.environment import Environment
from capstone.game import Connect4
from capstone.mdp import GameMDP
from capstone.rl import QLearningSelfPlay
from capstone.util import c42pdf
board = [['X', 'O', 'X', 'O', ' ', ' ', ' '],
['X', 'O', 'X', 'O', ' ', ' ', ' '],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['X', 'O', 'X', 'O', 'X', 'O', 'X'],
['X', 'O', 'X', 'O', 'X', 'O', 'X']]
game = Connect4(board)
mdp = GameMDP(game)
env = Environment(mdp)
qlearning = QLearningSelfPlay(env, n_episodes=1000)
qlearning.learn()
c42pdf('figures/c4_ql_tabular_selfplay_current.pdf', game.board)
for move in game.legal_moves():
print('*' * 80)
value = qlearning.qf[(game, move)]
print('Move: %s' % move)
print('Value: %f' % value)
new_game = game.copy().make_move(move)
print(new_game)
filename = 'figures/c4_ql_tabular_selfplay_move_%s_value_%.4f.pdf' % (move, value)
c42pdf(filename, new_game.board)
|
<commit_before><commit_msg>Add Connect4 Q-learning tabular self-play example<commit_after>
|
'''
Q-learning via self-play is used to learn the state-action values, Q(s, a),
for the legal moves of a Connect 4 position.
'''
from capstone.environment import Environment
from capstone.game import Connect4
from capstone.mdp import GameMDP
from capstone.rl import QLearningSelfPlay
from capstone.util import c42pdf
board = [['X', 'O', 'X', 'O', ' ', ' ', ' '],
['X', 'O', 'X', 'O', ' ', ' ', ' '],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['X', 'O', 'X', 'O', 'X', 'O', 'X'],
['X', 'O', 'X', 'O', 'X', 'O', 'X']]
game = Connect4(board)
mdp = GameMDP(game)
env = Environment(mdp)
qlearning = QLearningSelfPlay(env, n_episodes=1000)
qlearning.learn()
c42pdf('figures/c4_ql_tabular_selfplay_current.pdf', game.board)
for move in game.legal_moves():
print('*' * 80)
value = qlearning.qf[(game, move)]
print('Move: %s' % move)
print('Value: %f' % value)
new_game = game.copy().make_move(move)
print(new_game)
filename = 'figures/c4_ql_tabular_selfplay_move_%s_value_%.4f.pdf' % (move, value)
c42pdf(filename, new_game.board)
|
Add Connect4 Q-learning tabular self-play example'''
Q-learning via self-play is used to learn the state-action values, Q(s, a),
for the legal moves of a Connect 4 position.
'''
from capstone.environment import Environment
from capstone.game import Connect4
from capstone.mdp import GameMDP
from capstone.rl import QLearningSelfPlay
from capstone.util import c42pdf
board = [['X', 'O', 'X', 'O', ' ', ' ', ' '],
['X', 'O', 'X', 'O', ' ', ' ', ' '],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['X', 'O', 'X', 'O', 'X', 'O', 'X'],
['X', 'O', 'X', 'O', 'X', 'O', 'X']]
game = Connect4(board)
mdp = GameMDP(game)
env = Environment(mdp)
qlearning = QLearningSelfPlay(env, n_episodes=1000)
qlearning.learn()
c42pdf('figures/c4_ql_tabular_selfplay_current.pdf', game.board)
for move in game.legal_moves():
print('*' * 80)
value = qlearning.qf[(game, move)]
print('Move: %s' % move)
print('Value: %f' % value)
new_game = game.copy().make_move(move)
print(new_game)
filename = 'figures/c4_ql_tabular_selfplay_move_%s_value_%.4f.pdf' % (move, value)
c42pdf(filename, new_game.board)
|
<commit_before><commit_msg>Add Connect4 Q-learning tabular self-play example<commit_after>'''
Q-learning via self-play is used to learn the state-action values, Q(s, a),
for the legal moves of a Connect 4 position.
'''
from capstone.environment import Environment
from capstone.game import Connect4
from capstone.mdp import GameMDP
from capstone.rl import QLearningSelfPlay
from capstone.util import c42pdf
board = [['X', 'O', 'X', 'O', ' ', ' ', ' '],
['X', 'O', 'X', 'O', ' ', ' ', ' '],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['O', 'X', 'O', 'X', 'O', 'X', 'O'],
['X', 'O', 'X', 'O', 'X', 'O', 'X'],
['X', 'O', 'X', 'O', 'X', 'O', 'X']]
game = Connect4(board)
mdp = GameMDP(game)
env = Environment(mdp)
qlearning = QLearningSelfPlay(env, n_episodes=1000)
qlearning.learn()
c42pdf('figures/c4_ql_tabular_selfplay_current.pdf', game.board)
for move in game.legal_moves():
print('*' * 80)
value = qlearning.qf[(game, move)]
print('Move: %s' % move)
print('Value: %f' % value)
new_game = game.copy().make_move(move)
print(new_game)
filename = 'figures/c4_ql_tabular_selfplay_move_%s_value_%.4f.pdf' % (move, value)
c42pdf(filename, new_game.board)
|
|
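QLearningSelfPlay itself is not shown here, but the tabular update it presumably performs is the standard rule Q(s,a) += alpha * (r + gamma * max_a' Q(s',a') - Q(s,a)). A self-contained sketch of that rule over a dict-backed Q-function; the alpha and gamma defaults are illustrative:

# Standard tabular Q-learning update over a dict keyed by (state, action).
def q_update(qf, state, action, reward, next_state, next_actions,
             alpha=0.1, gamma=1.0):
    best_next = max((qf.get((next_state, a), 0.0) for a in next_actions),
                    default=0.0)  # 0.0 when next_state is terminal
    old = qf.get((state, action), 0.0)
    qf[(state, action)] = old + alpha * (reward + gamma * best_next - old)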
912c03ddbb533ddcc7e3d256f8c7871f2a04698e
|
apps/core/migrations/0007_auto_20171106_1318.py
|
apps/core/migrations/0007_auto_20171106_1318.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-06 13:18
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20171017_1257'),
]
operations = [
migrations.AlterModelOptions(
name='pixeler',
options={'verbose_name': 'Pixeler', 'verbose_name_plural': 'Pixelers'},
),
]
|
Add missing migration for Pixeler verbose name(s)
|
Add missing migration for Pixeler verbose name(s)
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
Add missing migration for Pixeler verbose name(s)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-06 13:18
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20171017_1257'),
]
operations = [
migrations.AlterModelOptions(
name='pixeler',
options={'verbose_name': 'Pixeler', 'verbose_name_plural': 'Pixelers'},
),
]
|
<commit_before><commit_msg>Add missing migration for Pixeler verbose name(s)<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-06 13:18
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20171017_1257'),
]
operations = [
migrations.AlterModelOptions(
name='pixeler',
options={'verbose_name': 'Pixeler', 'verbose_name_plural': 'Pixelers'},
),
]
|
Add missing migration for Pixeler verbose name(s)# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-06 13:18
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20171017_1257'),
]
operations = [
migrations.AlterModelOptions(
name='pixeler',
options={'verbose_name': 'Pixeler', 'verbose_name_plural': 'Pixelers'},
),
]
|
<commit_before><commit_msg>Add missing migration for Pixeler verbose name(s)<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-06 13:18
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20171017_1257'),
]
operations = [
migrations.AlterModelOptions(
name='pixeler',
options={'verbose_name': 'Pixeler', 'verbose_name_plural': 'Pixelers'},
),
]
|
|
142cd3a29155dadc3ed2e7bc0def7a63632f0ebb
|
ooni/nettests/manipulation/parasitictraceroute.py
|
ooni/nettests/manipulation/parasitictraceroute.py
|
from twisted.python import usage
from twisted.internet import defer, reactor
from ooni.templates import scapyt
from ooni.utils import log
from ooni.utils.txscapy import ParasiticTraceroute
from ooni.settings import config
from scapy.all import TCPerror, IPerror
class ParasiticTracerouteTest(scapyt.BaseScapyTest):
name = "Parasitic Traceroute Test"
description = "Injects duplicate TCP packets with varying TTL values by sniffing traffic"
version = '0.1'
samplePeriod = 40
def setUp(self):
self.report['parasitic_traceroute'] = {}
def test_parasitic_traceroute(self):
self.pt = ParasiticTraceroute()
config.scapyFactory.registerProtocol(self.pt)
d = defer.Deferred()
reactor.callLater(self.samplePeriod, d.callback, self.pt)
return d
def postProcessor(self, *args, **kwargs):
self.pt.stopListening()
self.report['received_packets'] = self.pt.received_packets
for packet in self.pt.received_packets:
k = (packet[IPerror].id, packet[TCPerror].sport, packet[TCPerror].dport, packet[TCPerror].seq)
if k in self.pt.matched_packets:
ttl = self.pt.matched_packets[k]['ttl']
else:
ttl = 'Unknown'
hop = (ttl, packet.src)
path = 'hops_%s' % packet[IPerror].dst
if path in self.report['parasitic_traceroute']:
self.report['parasitic_traceroute'][path].append(hop)
else:
self.report['parasitic_traceroute'][path] = [hop]
for p in self.report['parasitic_traceroute'].keys():
self.report['parasitic_traceroute'][p].sort(key=lambda x: x[0])
self.report['sent_packets'] = self.pt.sent_packets
return self.report
|
Add a Parasitic Traceroute NetTest
|
Add a Parasitic Traceroute NetTest
|
Python
|
bsd-2-clause
|
kdmurray91/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe
|
Add a Parasitic Traceroute NetTest
|
from twisted.python import usage
from twisted.internet import defer, reactor
from ooni.templates import scapyt
from ooni.utils import log
from ooni.utils.txscapy import ParasiticTraceroute
from ooni.settings import config
from scapy.all import TCPerror, IPerror
class ParasiticTracerouteTest(scapyt.BaseScapyTest):
name = "Parasitic Traceroute Test"
description = "Injects duplicate TCP packets with varying TTL values by sniffing traffic"
version = '0.1'
samplePeriod = 40
def setUp(self):
self.report['parasitic_traceroute'] = {}
def test_parasitic_traceroute(self):
self.pt = ParasiticTraceroute()
config.scapyFactory.registerProtocol(self.pt)
d = defer.Deferred()
reactor.callLater(self.samplePeriod, d.callback, self.pt)
return d
def postProcessor(self, *args, **kwargs):
self.pt.stopListening()
self.report['received_packets'] = self.pt.received_packets
for packet in self.pt.received_packets:
k = (packet[IPerror].id, packet[TCPerror].sport, packet[TCPerror].dport, packet[TCPerror].seq)
if k in self.pt.matched_packets:
ttl = self.pt.matched_packets[k]['ttl']
else:
ttl = 'Unknown'
hop = (ttl, packet.src)
path = 'hops_%s' % packet[IPerror].dst
if path in self.report['parasitic_traceroute']:
self.report['parasitic_traceroute'][path].append(hop)
else:
self.report['parasitic_traceroute'][path] = [hop]
for p in self.report['parasitic_traceroute'].keys():
self.report['parasitic_traceroute'][p].sort(key=lambda x: x[0])
self.report['sent_packets'] = self.pt.sent_packets
return self.report
|
<commit_before><commit_msg>Add a Parasitic Traceroute NetTest<commit_after>
|
from twisted.python import usage
from twisted.internet import defer, reactor
from ooni.templates import scapyt
from ooni.utils import log
from ooni.utils.txscapy import ParasiticTraceroute
from ooni.settings import config
from scapy.all import TCPerror, IPerror
class ParasiticTracerouteTest(scapyt.BaseScapyTest):
name = "Parasitic Traceroute Test"
description = "Injects duplicate TCP packets with varying TTL values by sniffing traffic"
version = '0.1'
samplePeriod = 40
def setUp(self):
self.report['parasitic_traceroute'] = {}
def test_parasitic_traceroute(self):
self.pt = ParasiticTraceroute()
config.scapyFactory.registerProtocol(self.pt)
d = defer.Deferred()
reactor.callLater(self.samplePeriod, d.callback, self.pt)
return d
def postProcessor(self, *args, **kwargs):
self.pt.stopListening()
self.report['received_packets'] = self.pt.received_packets
for packet in self.pt.received_packets:
k = (packet[IPerror].id, packet[TCPerror].sport, packet[TCPerror].dport, packet[TCPerror].seq)
if k in self.pt.matched_packets:
ttl = self.pt.matched_packets[k]['ttl']
else:
ttl = 'Unknown'
hop = (ttl, packet.src)
path = 'hops_%s' % packet[IPerror].dst
if path in self.report['parasitic_traceroute']:
self.report['parasitic_traceroute'][path].append(hop)
else:
self.report['parasitic_traceroute'][path] = [hop]
for p in self.report['parasitic_traceroute'].keys():
self.report['parasitic_traceroute'][p].sort(key=lambda x: x[0])
self.report['sent_packets'] = self.pt.sent_packets
return self.report
|
Add a Parasitic Traceroute NetTestfrom twisted.python import usage
from twisted.internet import defer, reactor
from ooni.templates import scapyt
from ooni.utils import log
from ooni.utils.txscapy import ParasiticTraceroute
from ooni.settings import config
from scapy.all import TCPerror, IPerror
class ParasiticTracerouteTest(scapyt.BaseScapyTest):
name = "Parasitic Traceroute Test"
description = "Injects duplicate TCP packets with varying TTL values by sniffing traffic"
version = '0.1'
samplePeriod = 40
def setUp(self):
self.report['parasitic_traceroute'] = {}
def test_parasitic_traceroute(self):
self.pt = ParasiticTraceroute()
config.scapyFactory.registerProtocol(self.pt)
d = defer.Deferred()
reactor.callLater(self.samplePeriod, d.callback, self.pt)
return d
def postProcessor(self, *args, **kwargs):
self.pt.stopListening()
self.report['received_packets'] = self.pt.received_packets
for packet in self.pt.received_packets:
k = (packet[IPerror].id, packet[TCPerror].sport, packet[TCPerror].dport, packet[TCPerror].seq)
if k in self.pt.matched_packets:
ttl = self.pt.matched_packets[k]['ttl']
else:
ttl = 'Unknown'
hop = (ttl, packet.src)
path = 'hops_%s' % packet[IPerror].dst
if path in self.report['parasitic_traceroute']:
self.report['parasitic_traceroute'][path].append(hop)
else:
self.report['parasitic_traceroute'][path] = [hop]
for p in self.report['parasitic_traceroute'].keys():
self.report['parasitic_traceroute'][p].sort(key=lambda x: x[0])
self.report['sent_packets'] = self.pt.sent_packets
return self.report
|
<commit_before><commit_msg>Add a Parasitic Traceroute NetTest<commit_after>from twisted.python import usage
from twisted.internet import defer, reactor
from ooni.templates import scapyt
from ooni.utils import log
from ooni.utils.txscapy import ParasiticTraceroute
from ooni.settings import config
from scapy.all import TCPerror, IPerror
class ParasiticTracerouteTest(scapyt.BaseScapyTest):
name = "Parasitic Traceroute Test"
description = "Injects duplicate TCP packets with varying TTL values by sniffing traffic"
version = '0.1'
samplePeriod = 40
def setUp(self):
self.report['parasitic_traceroute'] = {}
def test_parasitic_traceroute(self):
self.pt = ParasiticTraceroute()
config.scapyFactory.registerProtocol(self.pt)
d = defer.Deferred()
reactor.callLater(self.samplePeriod, d.callback, self.pt)
return d
def postProcessor(self, *args, **kwargs):
self.pt.stopListening()
self.report['received_packets'] = self.pt.received_packets
for packet in self.pt.received_packets:
k = (packet[IPerror].id, packet[TCPerror].sport, packet[TCPerror].dport, packet[TCPerror].seq)
if k in self.pt.matched_packets:
ttl = self.pt.matched_packets[k]['ttl']
else:
ttl = 'Unknown'
hop = (ttl, packet.src)
path = 'hops_%s' % packet[IPerror].dst
if path in self.report['parasitic_traceroute']:
self.report['parasitic_traceroute'][path].append(hop)
else:
self.report['parasitic_traceroute'][path] = [hop]
for p in self.report['parasitic_traceroute'].keys():
self.report['parasitic_traceroute'][p].sort(key=lambda x: x[0])
self.report['sent_packets'] = self.pt.sent_packets
return self.report
|
|
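One subtlety in the hop sort above: ttl can be the string 'Unknown' next to integers, which Python 2 orders silently but Python 3 rejects with a TypeError. A sort key that pushes unknown hops to the end makes the intent explicit (a sketch, not part of the commit):

# Sort hops numerically and send 'Unknown' TTLs to the end.
def hop_key(hop):
    ttl = hop[0]
    return (1, 0) if ttl == 'Unknown' else (0, ttl)

hops = [(3, '10.0.0.1'), ('Unknown', '10.0.0.9'), (1, '10.0.0.2')]
hops.sort(key=hop_key)
# -> [(1, '10.0.0.2'), (3, '10.0.0.1'), ('Unknown', '10.0.0.9')]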
ac2c526fbe2a19c71b2500e98813b9adc554d147
|
tests/test_add_language/test_save_bible_data.py
|
tests/test_add_language/test_save_bible_data.py
|
# tests.test_add_language.test_save_bible_data
# coding=utf-8
from __future__ import unicode_literals
import copy
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
LANGUAGE_ID = 'swe'
BIBLE = {
'books': [{'id': 'gen', 'name': 'Första Moseboken'}],
'default_version': 33,
'versions': [{'id': 33, 'name': 'BSV'}, {'id': 154, 'name': 'B2000'}]
}
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_new():
"""should save Bible data to new data file if it doesn't exist"""
bible_file_path = os.path.join(
yvs.PACKAGED_DATA_DIR_PATH, 'bible',
'language-{}.json'.format(LANGUAGE_ID))
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, BIBLE)
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_existing():
"""should update Bible data in existing data file"""
new_bible = copy.deepcopy(BIBLE)
new_bible['default_version'] = 154
bible_file_path = os.path.join(
        yvs.PACKAGED_DATA_DIR_PATH, 'bible',
        'language-{}.json'.format(LANGUAGE_ID))
with open(bible_file_path, 'w') as bible_file:
json.dump(new_bible, bible_file)
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, new_bible)
|
Add tests for save_bible_data function
|
Add tests for save_bible_data function
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
Add tests for save_bible_data function
|
# tests.test_add_language.test_save_bible_data
# coding=utf-8
from __future__ import unicode_literals
import copy
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
LANGUAGE_ID = 'swe'
BIBLE = {
'books': [{'id': 'gen', 'name': 'Första Moseboken'}],
'default_version': 33,
'versions': [{'id': 33, 'name': 'BSV'}, {'id': 154, 'name': 'B2000'}]
}
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_new():
"""should save Bible data to new data file if it doesn't exist"""
bible_file_path = os.path.join(
yvs.PACKAGED_DATA_DIR_PATH, 'bible',
'language-{}.json'.format(LANGUAGE_ID))
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, BIBLE)
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_existing():
"""should update Bible data in existing data file"""
new_bible = copy.deepcopy(BIBLE)
new_bible['default_version'] = 154
bible_file_path = os.path.join(
        yvs.PACKAGED_DATA_DIR_PATH, 'bible',
        'language-{}.json'.format(LANGUAGE_ID))
with open(bible_file_path, 'w') as bible_file:
json.dump(new_bible, bible_file)
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, new_bible)
|
<commit_before><commit_msg>Add tests for save_bible_data function<commit_after>
|
# tests.test_add_language.test_save_bible_data
# coding=utf-8
from __future__ import unicode_literals
import copy
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
LANGUAGE_ID = 'swe'
BIBLE = {
'books': [{'id': 'gen', 'name': 'Första Moseboken'}],
'default_version': 33,
'versions': [{'id': 33, 'name': 'BSV'}, {'id': 154, 'name': 'B2000'}]
}
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_new():
"""should save Bible data to new data file if it doesn't exist"""
bible_file_path = os.path.join(
yvs.PACKAGED_DATA_DIR_PATH, 'bible',
'language-{}.json'.format(LANGUAGE_ID))
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, BIBLE)
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_existing():
"""should update Bible data in existing data file"""
new_bible = copy.deepcopy(BIBLE)
new_bible['default_version'] = 154
bible_file_path = os.path.join(
        yvs.PACKAGED_DATA_DIR_PATH, 'bible',
        'language-{}.json'.format(LANGUAGE_ID))
with open(bible_file_path, 'w') as bible_file:
json.dump(new_bible, bible_file)
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, new_bible)
|
Add tests for save_bible_data function# tests.test_add_language.test_save_bible_data
# coding=utf-8
from __future__ import unicode_literals
import copy
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
LANGUAGE_ID = 'swe'
BIBLE = {
'books': [{'id': 'gen', 'name': 'Första Moseboken'}],
'default_version': 33,
'versions': [{'id': 33, 'name': 'BSV'}, {'id': 154, 'name': 'B2000'}]
}
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_new():
"""should save Bible data to new data file if it doesn't exist"""
bible_file_path = os.path.join(
yvs.PACKAGED_DATA_DIR_PATH, 'bible',
'language-{}.json'.format(LANGUAGE_ID))
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, BIBLE)
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_existing():
"""should update Bible data in existing data file"""
new_bible = copy.deepcopy(BIBLE)
new_bible['default_version'] = 154
bible_file_path = os.path.join(
        yvs.PACKAGED_DATA_DIR_PATH, 'bible',
        'language-{}.json'.format(LANGUAGE_ID))
with open(bible_file_path, 'w') as bible_file:
json.dump(new_bible, bible_file)
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, new_bible)
|
<commit_before><commit_msg>Add tests for save_bible_data function<commit_after># tests.test_add_language.test_save_bible_data
# coding=utf-8
from __future__ import unicode_literals
import copy
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
LANGUAGE_ID = 'swe'
BIBLE = {
'books': [{'id': 'gen', 'name': 'Första Moseboken'}],
'default_version': 33,
'versions': [{'id': 33, 'name': 'BSV'}, {'id': 154, 'name': 'B2000'}]
}
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_new():
"""should save Bible data to new data file if it doesn't exist"""
bible_file_path = os.path.join(
yvs.PACKAGED_DATA_DIR_PATH, 'bible',
'language-{}.json'.format(LANGUAGE_ID))
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, BIBLE)
@nose.with_setup(set_up, tear_down)
def test_save_bible_data_existing():
"""should update Bible data in existing data file"""
new_bible = copy.deepcopy(BIBLE)
new_bible['default_version'] = 154
bible_file_path = os.path.join(
        yvs.PACKAGED_DATA_DIR_PATH, 'bible',
        'language-{}.json'.format(LANGUAGE_ID))
with open(bible_file_path, 'w') as bible_file:
json.dump(new_bible, bible_file)
add_lang.save_bible_data(language_id=LANGUAGE_ID, bible=BIBLE)
nose.assert_true(os.path.exists(bible_file_path))
with open(bible_file_path, 'r') as bible_file:
saved_bible = json.load(bible_file)
nose.assert_equal(saved_bible, new_bible)
|
|
753e9f58d9e02db3a3daeb584c43f2b76c731542
|
geotrek/infrastructure/migrations/0025_auto_20210721_1540.py
|
geotrek/infrastructure/migrations/0025_auto_20210721_1540.py
|
# Generated by Django 3.1.13 on 2021-07-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authent', '0005_remove_userprofile_language'),
('infrastructure', '0024_auto_20210716_1043'),
]
operations = [
migrations.AlterField(
model_name='infrastructure',
name='maintenance_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of maintenance agents' interventions on infrastructure", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructuremaintenancedifficultylevel', verbose_name='Maintenance difficulty'),
),
migrations.AlterField(
model_name='infrastructure',
name='usage_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of end users' infrastructure usage", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructureusagedifficultylevel', verbose_name='Usage difficulty'),
),
migrations.AlterField(
model_name='infrastructureusagedifficultylevel',
name='label',
field=models.CharField(max_length=250, unique=True, verbose_name='Label'),
),
migrations.AlterUniqueTogether(
name='infrastructuremaintenancedifficultylevel',
unique_together={('label', 'structure')},
),
migrations.AlterUniqueTogether(
name='infrastructureusagedifficultylevel',
unique_together={('label', 'structure')},
),
]
|
Add migration for infrastructure model
|
Add migration for infrastructure model
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
Add migration for infrastructure model
|
# Generated by Django 3.1.13 on 2021-07-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authent', '0005_remove_userprofile_language'),
('infrastructure', '0024_auto_20210716_1043'),
]
operations = [
migrations.AlterField(
model_name='infrastructure',
name='maintenance_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of maintenance agents' interventions on infrastructure", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructuremaintenancedifficultylevel', verbose_name='Maintenance difficulty'),
),
migrations.AlterField(
model_name='infrastructure',
name='usage_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of end users' infrastructure usage", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructureusagedifficultylevel', verbose_name='Usage difficulty'),
),
migrations.AlterField(
model_name='infrastructureusagedifficultylevel',
name='label',
field=models.CharField(max_length=250, unique=True, verbose_name='Label'),
),
migrations.AlterUniqueTogether(
name='infrastructuremaintenancedifficultylevel',
unique_together={('label', 'structure')},
),
migrations.AlterUniqueTogether(
name='infrastructureusagedifficultylevel',
unique_together={('label', 'structure')},
),
]
|
<commit_before><commit_msg>Add migration for infrastructure model<commit_after>
|
# Generated by Django 3.1.13 on 2021-07-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authent', '0005_remove_userprofile_language'),
('infrastructure', '0024_auto_20210716_1043'),
]
operations = [
migrations.AlterField(
model_name='infrastructure',
name='maintenance_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of maintenance agents' interventions on infrastructure", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructuremaintenancedifficultylevel', verbose_name='Maintenance difficulty'),
),
migrations.AlterField(
model_name='infrastructure',
name='usage_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of end users' infrastructure usage", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructureusagedifficultylevel', verbose_name='Usage difficulty'),
),
migrations.AlterField(
model_name='infrastructureusagedifficultylevel',
name='label',
field=models.CharField(max_length=250, unique=True, verbose_name='Label'),
),
migrations.AlterUniqueTogether(
name='infrastructuremaintenancedifficultylevel',
unique_together={('label', 'structure')},
),
migrations.AlterUniqueTogether(
name='infrastructureusagedifficultylevel',
unique_together={('label', 'structure')},
),
]
|
Add migration for infrastructure model# Generated by Django 3.1.13 on 2021-07-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authent', '0005_remove_userprofile_language'),
('infrastructure', '0024_auto_20210716_1043'),
]
operations = [
migrations.AlterField(
model_name='infrastructure',
name='maintenance_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of maintenance agents' interventions on infrastructure", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructuremaintenancedifficultylevel', verbose_name='Maintenance difficulty'),
),
migrations.AlterField(
model_name='infrastructure',
name='usage_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of end users' infrastructure usage", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructureusagedifficultylevel', verbose_name='Usage difficulty'),
),
migrations.AlterField(
model_name='infrastructureusagedifficultylevel',
name='label',
field=models.CharField(max_length=250, unique=True, verbose_name='Label'),
),
migrations.AlterUniqueTogether(
name='infrastructuremaintenancedifficultylevel',
unique_together={('label', 'structure')},
),
migrations.AlterUniqueTogether(
name='infrastructureusagedifficultylevel',
unique_together={('label', 'structure')},
),
]
|
<commit_before><commit_msg>Add migration for infrastructure model<commit_after># Generated by Django 3.1.13 on 2021-07-21 15:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('authent', '0005_remove_userprofile_language'),
('infrastructure', '0024_auto_20210716_1043'),
]
operations = [
migrations.AlterField(
model_name='infrastructure',
name='maintenance_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of maintenance agents' interventions on infrastructure", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructuremaintenancedifficultylevel', verbose_name='Maintenance difficulty'),
),
migrations.AlterField(
model_name='infrastructure',
name='usage_difficulty',
field=models.ForeignKey(blank=True, help_text="Danger level of end users' infrastructure usage", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='infrastructures_set', to='infrastructure.infrastructureusagedifficultylevel', verbose_name='Usage difficulty'),
),
migrations.AlterField(
model_name='infrastructureusagedifficultylevel',
name='label',
field=models.CharField(max_length=250, unique=True, verbose_name='Label'),
),
migrations.AlterUniqueTogether(
name='infrastructuremaintenancedifficultylevel',
unique_together={('label', 'structure')},
),
migrations.AlterUniqueTogether(
name='infrastructureusagedifficultylevel',
unique_together={('label', 'structure')},
),
]
|
|
4affba629c94d4cdd12ff3aae6bb06f6a6ed215c
|
dp/longest_increasing_subsequence/python/lis.py
|
dp/longest_increasing_subsequence/python/lis.py
|
def lis(a):
dp = [1] * len(a)
for i in range(len(a)):
for j in range(i):
if a[j] < a[i]:
dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
n = int(input("Enter number of elements: "))
a = input().split(' ')
for i in range(len(a)):
a[i] = int(a[i])
print(lis(a))
|
Add longest increasing subsequence algorithm in Python.
|
Add longest increasing subsequence algorithm in Python.
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
|
Add longest increasing subsequence algorithm in Python.
|
def lis(a):
dp = [1] * len(a)
for i in range(len(a)):
for j in range(i):
if a[j] < a[i]:
dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
n = int(input("Enter number of elements: "))
a = input().split(' ')
for i in range(len(a)):
a[i] = int(a[i])
print(lis(a))
|
<commit_before><commit_msg>Add longest increasing subsequence algorithm in Python.<commit_after>
|
def lis(a):
dp = [1] * len(a)
for i in range(len(a)):
for j in range(i):
if a[j] < a[i]:
dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
n = int(input("Enter number of elements: "))
a = input().split(' ')
for i in range(len(a)):
a[i] = int(a[i])
print(lis(a))
|
Add longest increasing subsequence algorithm in Python.def lis(a):
dp = [1] * len(a)
for i in range(len(a)):
for j in range(i):
if a[j] < a[i]:
dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
n = int(input("Enter number of elements: "))
a = input().split(' ')
for i in range(len(a)):
a[i] = int(a[i])
print(lis(a))
|
<commit_before><commit_msg>Add longest increasing subsequence algorithm in Python.<commit_after>def lis(a):
dp = [1] * len(a)
for i in range(len(a)):
for j in range(i):
if a[j] < a[i]:
dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
n = int(input("Enter number of elements: "))
a = input().split(' ')
for i in range(len(a)):
a[i] = int(a[i])
print(lis(a))
|
|
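The DP above runs in O(n^2). The classic O(n log n) refinement keeps, for each subsequence length, the smallest possible tail value, and binary-searches that list with the standard library:

# O(n log n) LIS: tails[k] is the smallest tail of any increasing
# subsequence of length k + 1 seen so far.
from bisect import bisect_left

def lis_fast(a):
    tails = []
    for x in a:
        i = bisect_left(tails, x)
        if i == len(tails):
            tails.append(x)
        else:
            tails[i] = x
    return len(tails)

print(lis_fast([10, 9, 2, 5, 3, 7, 101, 18]))  # 4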
22e0e894bdfb457f00bad1016ae28884ef94256c
|
okupy/otp/__init__.py
|
okupy/otp/__init__.py
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
Make otp_init() race condition safe.
|
Make otp_init() race condition safe.
A race condition in get_or_create() may have resulted in two devices
created per user. Now we guarantee only one. Not that it matters real
much...
|
Python
|
agpl-3.0
|
gentoo/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,dastergon/identity.gentoo.org
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
Make otp_init() race condition safe.
A race condition in get_or_create() may have resulted in two devices
created per user. Now we guarantee only one. Not that it matters real
much...
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
<commit_before># vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
<commit_msg>Make otp_init() race condition safe.
A race condition in get_or_create() may have resulted in two devices
created per user. Now we guarantee only one. Not that it matters real
much...<commit_after>
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
Make otp_init() race condition safe.
A race condition in get_or_create() may have resulted in two devices
created per user. Now we guarantee only one. Not that it matters real
much...# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
<commit_before># vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev, created = TOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'TOTP device with LDAP secret',
})
if created:
tdev.save()
sdev, created = SOTPDevice.objects.get_or_create(
user=request.user,
defaults={
'name': 'SOTP device with LDAP passwords',
})
if created:
sdev.save()
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
<commit_msg>Make otp_init() race condition safe.
A race condition in get_or_create() may have resulted in two devices
created per user. Now we guarantee only one. Not that it matters real
much...<commit_after># vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django.db import IntegrityError
from django_otp import login as otp_login
from django_otp.middleware import OTPMiddleware
from .sotp.models import SOTPDevice
from .totp.models import TOTPDevice
def init_otp(request):
"""
Initialize OTP after login. This sets up OTP devices
for django_otp and calls the middleware to fill
request.user.is_verified().
"""
tdev = TOTPDevice(user=request.user,
name='TOTP device with LDAP secret')
try:
tdev.save()
except IntegrityError:
tdev = TOTPDevice.objects.get(user=request.user)
sdev = SOTPDevice(user=request.user,
name='SOTP device with LDAP secret')
try:
sdev.save()
except IntegrityError:
pass
# if OTP is disabled, it will match already
if tdev.verify_token():
otp_login(request, tdev)
# add .is_verified()
OTPMiddleware().process_request(request)
|
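Context for the race the commit message describes: Django's get_or_create() issues a SELECT followed by an INSERT, so two concurrent requests can both miss the SELECT and both INSERT, duplicating the row unless a unique constraint rejects one. A minimal sketch of the catch-IntegrityError pattern the commit adopts; the helper name and the assumption that `user` is covered by a unique constraint are illustrative, not part of the commit:

from django.db import IntegrityError

def get_or_create_race_safe(model, user, **defaults):
    # Sketch only: assumes `model` has a unique constraint covering `user`.
    try:
        return model.objects.get(user=user)            # fast path: row exists
    except model.DoesNotExist:
        try:
            return model.objects.create(user=user, **defaults)
        except IntegrityError:
            # A concurrent request won the INSERT race; fetch its row.
            return model.objects.get(user=user)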
1cc92f4b61841c43395c988712f98574cd2b1dc0
|
app/test/test_resultsets.py
|
app/test/test_resultsets.py
|
from app.logic import resultsets
from sympy import sympify, Derivative, Integral, I, sqrt
def test_predicates():
assert resultsets.is_approximatable_constant(sqrt(2))
assert not resultsets.is_approximatable_constant(sympify('2'))
assert resultsets.is_complex(2*I + 3)
assert not resultsets.is_complex(3)
|
Add basic test for resultsets
|
Add basic test for resultsets
|
Python
|
bsd-3-clause
|
kaichogami/sympy_gamma,kaichogami/sympy_gamma,iScienceLuvr/sympy_gamma,github4ry/sympy_gamma,bolshoibooze/sympy_gamma,kaichogami/sympy_gamma,iScienceLuvr/sympy_gamma,github4ry/sympy_gamma,iScienceLuvr/sympy_gamma,bolshoibooze/sympy_gamma,github4ry/sympy_gamma,bolshoibooze/sympy_gamma
|
Add basic test for resultsets
|
from app.logic import resultsets
from sympy import sympify, Derivative, Integral, I, sqrt
def test_predicates():
assert resultsets.is_approximatable_constant(sqrt(2))
assert not resultsets.is_approximatable_constant(sympify('2'))
assert resultsets.is_complex(2*I + 3)
assert not resultsets.is_complex(3)
|
<commit_before><commit_msg>Add basic test for resultsets<commit_after>
|
from app.logic import resultsets
from sympy import sympify, Derivative, Integral, I, sqrt
def test_predicates():
assert resultsets.is_approximatable_constant(sqrt(2))
assert not resultsets.is_approximatable_constant(sympify('2'))
assert resultsets.is_complex(2*I + 3)
assert not resultsets.is_complex(3)
|
Add basic test for resultsetsfrom app.logic import resultsets
from sympy import sympify, Derivative, Integral, I, sqrt
def test_predicates():
assert resultsets.is_approximatable_constant(sqrt(2))
assert not resultsets.is_approximatable_constant(sympify('2'))
assert resultsets.is_complex(2*I + 3)
assert not resultsets.is_complex(3)
|
<commit_before><commit_msg>Add basic test for resultsets<commit_after>from app.logic import resultsets
from sympy import sympify, Derivative, Integral, I, sqrt
def test_predicates():
assert resultsets.is_approximatable_constant(sqrt(2))
assert not resultsets.is_approximatable_constant(sympify('2'))
assert resultsets.is_complex(2*I + 3)
assert not resultsets.is_complex(3)
|
|
851eece6127b623fcb49ad5ed6738120ea0168c2
|
docs/deploy.py
|
docs/deploy.py
|
#!/usr/bin/env python
from __future__ import print_function
from contextlib import contextmanager
from glob import glob
from path import path
import os
from os.path import abspath, basename, dirname, exists, isfile
from shutil import move, rmtree
from subprocess import check_call
HERE = dirname(abspath(__file__))
ZIPLINE_ROOT = dirname(HERE)
TEMP_LOCATION = '/tmp/zipline-doc'
TEMP_LOCATION_GLOB = TEMP_LOCATION + '/*'
@contextmanager
def removing(path):
try:
yield
finally:
rmtree(path)
def ensure_not_exists(path):
if not exists(path):
return
if isfile(path):
os.unlink(path)
else:
rmtree(path)
def main():
print("Moving to %s." % HERE)
with path(HERE):
print("Building docs with 'make html'")
check_call(['make', 'html'])
print("Clearing temp location '%s'" % TEMP_LOCATION)
rmtree(TEMP_LOCATION, ignore_errors=True)
with removing(TEMP_LOCATION):
print("Copying built files to temp location.")
move('build/html', TEMP_LOCATION)
print("Moving to '%s'" % ZIPLINE_ROOT)
os.chdir(ZIPLINE_ROOT)
print("Checking out gh-pages branch.")
check_call(
[
'git', 'branch', '-f',
'--track', 'gh-pages', 'origin/gh-pages'
]
)
check_call(['git', 'checkout', 'gh-pages'])
check_call(['git', 'reset', '--hard', 'origin/gh-pages'])
print("Copying built files:")
for file_ in glob(TEMP_LOCATION_GLOB):
base = basename(file_)
print("%s -> %s" % (file_, base))
ensure_not_exists(base)
move(file_, '.')
print()
print("Updated documentation branch in directory %s" % ZIPLINE_ROOT)
print("If you are happy with these changes, commit and push to gh-pages.")
if __name__ == '__main__':
main()
|
Add script for generating docs.
|
MAINT: Add script for generating docs.
|
Python
|
apache-2.0
|
magne-max/zipline-ja,nborggren/zipline,wilsonkichoi/zipline,umuzungu/zipline,Scapogo/zipline,alphaBenj/zipline,florentchandelier/zipline,magne-max/zipline-ja,quantopian/zipline,humdings/zipline,enigmampc/catalyst,umuzungu/zipline,bartosh/zipline,grundgruen/zipline,grundgruen/zipline,quantopian/zipline,humdings/zipline,wilsonkichoi/zipline,Scapogo/zipline,enigmampc/catalyst,nborggren/zipline,florentchandelier/zipline,alphaBenj/zipline,bartosh/zipline
|
MAINT: Add script for generating docs.
|
#!/usr/bin/env python
from __future__ import print_function
from contextlib import contextmanager
from glob import glob
from path import path
import os
from os.path import abspath, basename, dirname, exists, isfile
from shutil import move, rmtree
from subprocess import check_call
HERE = dirname(abspath(__file__))
ZIPLINE_ROOT = dirname(HERE)
TEMP_LOCATION = '/tmp/zipline-doc'
TEMP_LOCATION_GLOB = TEMP_LOCATION + '/*'
@contextmanager
def removing(path):
try:
yield
finally:
rmtree(path)
def ensure_not_exists(path):
if not exists(path):
return
if isfile(path):
os.unlink(path)
else:
rmtree(path)
def main():
print("Moving to %s." % HERE)
with path(HERE):
print("Building docs with 'make html'")
check_call(['make', 'html'])
print("Clearing temp location '%s'" % TEMP_LOCATION)
rmtree(TEMP_LOCATION, ignore_errors=True)
with removing(TEMP_LOCATION):
print("Copying built files to temp location.")
move('build/html', TEMP_LOCATION)
print("Moving to '%s'" % ZIPLINE_ROOT)
os.chdir(ZIPLINE_ROOT)
print("Checking out gh-pages branch.")
check_call(
[
'git', 'branch', '-f',
'--track', 'gh-pages', 'origin/gh-pages'
]
)
check_call(['git', 'checkout', 'gh-pages'])
check_call(['git', 'reset', '--hard', 'origin/gh-pages'])
print("Copying built files:")
for file_ in glob(TEMP_LOCATION_GLOB):
base = basename(file_)
print("%s -> %s" % (file_, base))
ensure_not_exists(base)
move(file_, '.')
print()
print("Updated documentation branch in directory %s" % ZIPLINE_ROOT)
print("If you are happy with these changes, commit and push to gh-pages.")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>MAINT: Add script for generating docs.<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
from contextlib import contextmanager
from glob import glob
from path import path
import os
from os.path import abspath, basename, dirname, exists, isfile
from shutil import move, rmtree
from subprocess import check_call
HERE = dirname(abspath(__file__))
ZIPLINE_ROOT = dirname(HERE)
TEMP_LOCATION = '/tmp/zipline-doc'
TEMP_LOCATION_GLOB = TEMP_LOCATION + '/*'
@contextmanager
def removing(path):
try:
yield
finally:
rmtree(path)
def ensure_not_exists(path):
if not exists(path):
return
if isfile(path):
os.unlink(path)
else:
rmtree(path)
def main():
print("Moving to %s." % HERE)
with path(HERE):
print("Building docs with 'make html'")
check_call(['make', 'html'])
print("Clearing temp location '%s'" % TEMP_LOCATION)
rmtree(TEMP_LOCATION, ignore_errors=True)
with removing(TEMP_LOCATION):
print("Copying built files to temp location.")
move('build/html', TEMP_LOCATION)
print("Moving to '%s'" % ZIPLINE_ROOT)
os.chdir(ZIPLINE_ROOT)
print("Checking out gh-pages branch.")
check_call(
[
'git', 'branch', '-f',
'--track', 'gh-pages', 'origin/gh-pages'
]
)
check_call(['git', 'checkout', 'gh-pages'])
check_call(['git', 'reset', '--hard', 'origin/gh-pages'])
print("Copying built files:")
for file_ in glob(TEMP_LOCATION_GLOB):
base = basename(file_)
print("%s -> %s" % (file_, base))
ensure_not_exists(base)
move(file_, '.')
print()
print("Updated documentation branch in directory %s" % ZIPLINE_ROOT)
print("If you are happy with these changes, commit and push to gh-pages.")
if __name__ == '__main__':
main()
|
MAINT: Add script for generating docs.#!/usr/bin/env python
from __future__ import print_function
from contextlib import contextmanager
from glob import glob
from path import path
import os
from os.path import abspath, basename, dirname, exists, isfile
from shutil import move, rmtree
from subprocess import check_call
HERE = dirname(abspath(__file__))
ZIPLINE_ROOT = dirname(HERE)
TEMP_LOCATION = '/tmp/zipline-doc'
TEMP_LOCATION_GLOB = TEMP_LOCATION + '/*'
@contextmanager
def removing(path):
try:
yield
finally:
rmtree(path)
def ensure_not_exists(path):
if not exists(path):
return
if isfile(path):
os.unlink(path)
else:
rmtree(path)
def main():
print("Moving to %s." % HERE)
with path(HERE):
print("Building docs with 'make html'")
check_call(['make', 'html'])
print("Clearing temp location '%s'" % TEMP_LOCATION)
rmtree(TEMP_LOCATION, ignore_errors=True)
with removing(TEMP_LOCATION):
print("Copying built files to temp location.")
move('build/html', TEMP_LOCATION)
print("Moving to '%s'" % ZIPLINE_ROOT)
os.chdir(ZIPLINE_ROOT)
print("Checking out gh-pages branch.")
check_call(
[
'git', 'branch', '-f',
'--track', 'gh-pages', 'origin/gh-pages'
]
)
check_call(['git', 'checkout', 'gh-pages'])
check_call(['git', 'reset', '--hard', 'origin/gh-pages'])
print("Copying built files:")
for file_ in glob(TEMP_LOCATION_GLOB):
base = basename(file_)
print("%s -> %s" % (file_, base))
ensure_not_exists(base)
move(file_, '.')
print()
print("Updated documentation branch in directory %s" % ZIPLINE_ROOT)
print("If you are happy with these changes, commit and push to gh-pages.")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>MAINT: Add script for generating docs.<commit_after>#!/usr/bin/env python
from __future__ import print_function
from contextlib import contextmanager
from glob import glob
from path import path
import os
from os.path import abspath, basename, dirname, exists, isfile
from shutil import move, rmtree
from subprocess import check_call
HERE = dirname(abspath(__file__))
ZIPLINE_ROOT = dirname(HERE)
TEMP_LOCATION = '/tmp/zipline-doc'
TEMP_LOCATION_GLOB = TEMP_LOCATION + '/*'
@contextmanager
def removing(path):
try:
yield
finally:
rmtree(path)
def ensure_not_exists(path):
if not exists(path):
return
if isfile(path):
os.unlink(path)
else:
rmtree(path)
def main():
print("Moving to %s." % HERE)
with path(HERE):
print("Building docs with 'make html'")
check_call(['make', 'html'])
print("Clearing temp location '%s'" % TEMP_LOCATION)
rmtree(TEMP_LOCATION, ignore_errors=True)
with removing(TEMP_LOCATION):
print("Copying built files to temp location.")
move('build/html', TEMP_LOCATION)
print("Moving to '%s'" % ZIPLINE_ROOT)
os.chdir(ZIPLINE_ROOT)
print("Checking out gh-pages branch.")
check_call(
[
'git', 'branch', '-f',
'--track', 'gh-pages', 'origin/gh-pages'
]
)
check_call(['git', 'checkout', 'gh-pages'])
check_call(['git', 'reset', '--hard', 'origin/gh-pages'])
print("Copying built files:")
for file_ in glob(TEMP_LOCATION_GLOB):
base = basename(file_)
print("%s -> %s" % (file_, base))
ensure_not_exists(base)
move(file_, '.')
print()
print("Updated documentation branch in directory %s" % ZIPLINE_ROOT)
print("If you are happy with these changes, commit and push to gh-pages.")
if __name__ == '__main__':
main()
|
|
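The deploy script relies on path.py's behavior of changing the working directory when a path object is used as a `with` block. A stdlib-only sketch of the same idea, in case the extra dependency is unwanted (the helper name pushd is my own; Python 3.11+ also ships contextlib.chdir for this):

import os
from contextlib import contextmanager

@contextmanager
def pushd(new_dir):
    # Temporarily switch the working directory, restoring it on exit.
    prev_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield
    finally:
        os.chdir(prev_dir)

# Usage sketch: with pushd(HERE): check_call(['make', 'html'])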
fa09dd08cd7b462f2b9f9b860aa11cd64f00c913
|
Arrays/arrange_numbers_to_form_biggest_number.py
|
Arrays/arrange_numbers_to_form_biggest_number.py
|
import unittest
"""
Given an array of numbers, arrange them in a way that yields the maximum value.
Input: 54 546 548 60
Output: 6054854654
Input: 1 34 3 98 9 76 45 4
Output: 998764543431
"""
"""
Approach:
1. Sort the numbers using a custom comparator.
2. The comparator works as follows: suppose it takes two numbers X and Y.
3. If XY > YX, it returns -1; elif XY < YX, it returns 1; else it returns 0 (so the sort is non-increasing on the concatenation).
4. E.g. suppose X = 60 and Y = 548. XY = 60548, YX = 54860. So X should come before Y, hence return -1.
"""
def combined_number_compartor(first, second):
str_first_second = str(first) + str(second)
str_second_first = str(second) + str(first)
int_first_second = int(str_first_second)
int_second_first = int(str_second_first)
# Sorting is in non-increasing order
if int_first_second > int_second_first:
return -1
elif int_first_second < int_second_first:
return 1
return 0
def arrange_numbers_to_form_biggest_number(list_of_numbers):
list_of_numbers = sorted(list_of_numbers, cmp=combined_number_compartor)
return int(''.join(map(lambda x: str(x), list_of_numbers)))
class TestArrangement(unittest.TestCase):
def test_arrangement(self):
self.assertEqual(arrange_numbers_to_form_biggest_number([54, 546, 548, 60]), 6054854654)
|
Rearrange array numbers to form biggest number
|
Rearrange array numbers to form biggest number
|
Python
|
mit
|
prathamtandon/g4gproblems
|
Rearrange array numbers to form biggest number
|
import unittest
"""
Given an array of numbers, arrange them in a way that yields the maximum value.
Input: 54 546 548 60
Output: 6054854654
Input: 1 34 3 98 9 76 45 4
Output: 998764543431
"""
"""
Approach:
1. Sort the numbers using a custom comparator.
2. The comparator works as follows: suppose it takes two numbers X and Y.
3. If XY > YX, it returns -1; elif XY < YX, it returns 1; else it returns 0 (so the sort is non-increasing on the concatenation).
4. E.g. suppose X = 60 and Y = 548. XY = 60548, YX = 54860. So X should come before Y, hence return -1.
"""
def combined_number_compartor(first, second):
str_first_second = str(first) + str(second)
str_second_first = str(second) + str(first)
int_first_second = int(str_first_second)
int_second_first = int(str_second_first)
# Sorting is in non-increasing order
if int_first_second > int_second_first:
return -1
elif int_first_second < int_second_first:
return 1
return 0
def arrange_numbers_to_form_biggest_number(list_of_numbers):
list_of_numbers = sorted(list_of_numbers, cmp=combined_number_compartor)
return int(''.join(map(lambda x: str(x), list_of_numbers)))
class TestArrangement(unittest.TestCase):
def test_arrangement(self):
self.assertEqual(arrange_numbers_to_form_biggest_number([54, 546, 548, 60]), 6054854654)
|
<commit_before><commit_msg>Rearrange array numbers to form biggest number<commit_after>
|
import unittest
"""
Given an array of numbers, arrange them in a way that yields the maximum value.
Input: 54 546 548 60
Output: 6054854654
Input: 1 34 3 98 9 76 45 4
Output: 998764543431
"""
"""
Approach:
1. Sort the numbers using a custom comparator.
2. The comparator works as follows: suppose it takes two numbers X and Y.
3. If XY > YX, it returns -1; elif XY < YX, it returns 1; else it returns 0 (so the sort is non-increasing on the concatenation).
4. E.g. suppose X = 60 and Y = 548. XY = 60548, YX = 54860. So X should come before Y, hence return -1.
"""
def combined_number_compartor(first, second):
str_first_second = str(first) + str(second)
str_second_first = str(second) + str(first)
int_first_second = int(str_first_second)
int_second_first = int(str_second_first)
# Sorting is in non-increasing order
if int_first_second > int_second_first:
return -1
elif int_first_second < int_second_first:
return 1
return 0
def arrange_numbers_to_form_biggest_number(list_of_numbers):
list_of_numbers = sorted(list_of_numbers, cmp=combined_number_compartor)
return int(''.join(map(lambda x: str(x), list_of_numbers)))
class TestArrangement(unittest.TestCase):
def test_arrangement(self):
self.assertEqual(arrange_numbers_to_form_biggest_number([54, 546, 548, 60]), 6054854654)
|
Rearrange array numbers to form biggest numberimport unittest
"""
Given an array of numbers, arrange them in a way that yields the maximum value.
Input: 54 546 548 60
Output: 6054854654
Input: 1 34 3 98 9 76 45 4
Output: 998764543431
"""
"""
Approach:
1. Sort the numbers using a custom comparator.
2. The comparator works as follows: suppose it takes two numbers X and Y.
3. If XY > YX, it returns -1; elif XY < YX, it returns 1; else it returns 0 (so the sort is non-increasing on the concatenation).
4. E.g. suppose X = 60 and Y = 548. XY = 60548, YX = 54860. So X should come before Y, hence return -1.
"""
def combined_number_compartor(first, second):
str_first_second = str(first) + str(second)
str_second_first = str(second) + str(first)
int_first_second = int(str_first_second)
int_second_first = int(str_second_first)
# Sorting is in non-increasing order
if int_first_second > int_second_first:
return -1
elif int_first_second < int_second_first:
return 1
return 0
def arrange_numbers_to_form_biggest_number(list_of_numbers):
list_of_numbers = sorted(list_of_numbers, cmp=combined_number_compartor)
return int(''.join(map(lambda x: str(x), list_of_numbers)))
class TestArrangement(unittest.TestCase):
def test_arrangement(self):
self.assertEqual(arrange_numbers_to_form_biggest_number([54, 546, 548, 60]), 6054854654)
|
<commit_before><commit_msg>Rearrange array numbers to form biggest number<commit_after>import unittest
"""
Given an array of numbers, arrange them in a way that yields the maximum value.
Input: 54 546 548 60
Output: 6054854654
Input: 1 34 3 98 9 76 45 4
Output: 998764543431
"""
"""
Approach:
1. Sort the numbers using a custom comparator.
2. The comparator works as follows: suppose it takes two numbers X and Y.
3. If XY > YX, it returns -1; elif XY < YX, it returns 1; else it returns 0 (so the sort is non-increasing on the concatenation).
4. E.g. suppose X = 60 and Y = 548. XY = 60548, YX = 54860. So X should come before Y, hence return -1.
"""
def combined_number_compartor(first, second):
str_first_second = str(first) + str(second)
str_second_first = str(second) + str(first)
int_first_second = int(str_first_second)
int_second_first = int(str_second_first)
# Sorting is in non-increasing order
if int_first_second > int_second_first:
return -1
elif int_first_second < int_second_first:
return 1
return 0
def arrange_numbers_to_form_biggest_number(list_of_numbers):
list_of_numbers = sorted(list_of_numbers, cmp=combined_number_compartor)
return int(''.join(map(lambda x: str(x), list_of_numbers)))
class TestArrangement(unittest.TestCase):
def test_arrangement(self):
self.assertEqual(arrange_numbers_to_form_biggest_number([54, 546, 548, 60]), 6054854654)
|
|
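Note that sorted(..., cmp=...) is Python 2 only; the cmp keyword was removed in Python 3. A sketch of the same comparator under Python 3 via functools.cmp_to_key (the function names here are my own spelling, not the original's):

from functools import cmp_to_key

def combined_number_comparator(first, second):
    # Order pairs so the concatenation first+second forms the larger number.
    fs, sf = str(first) + str(second), str(second) + str(first)
    if int(fs) > int(sf):
        return -1   # first sorts earlier: non-increasing on concatenation
    if int(fs) < int(sf):
        return 1
    return 0

def arrange_numbers_to_form_biggest_number(numbers):
    ordered = sorted(numbers, key=cmp_to_key(combined_number_comparator))
    return int(''.join(str(n) for n in ordered))

assert arrange_numbers_to_form_biggest_number([54, 546, 548, 60]) == 6054854654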
ec0b4994decd7daf532a8adc7f1c5eed5e3df812
|
development/importMaven/import_maven_artifacts.py
|
development/importMaven/import_maven_artifacts.py
|
#!/usr/bin/python
"""
Copyright 2018 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import subprocess
NAME_HELP = '''
The name of the artifact you want to add to the prebuilts folder.
E.g. android.arch.work:work-runtime-ktx:1.0.0-alpha07
'''
def main():
"""Parses the command line arguments, and executes the gradle script
which downloads the maven artifacts.
"""
parser = argparse.ArgumentParser(
description='Helps download maven artifacts to prebuilts.')
parser.add_argument('-n', '--name', help=NAME_HELP,
required=True, dest='name')
parse_result = parser.parse_args()
artifact_name = parse_result.name
command = './gradlew --build-file build.gradle.kts -PartifactName=%s' % (
artifact_name)
process = subprocess.Popen(command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if stderr is not None:
lines = stderr.split('\n')
for line in lines:
print line
if stdout is not None:
lines = stdout.split('\n')
for line in lines:
print line
if __name__ == '__main__':
main()
|
Add the helper script to import packages from Maven.
|
Add the helper script to import packages from Maven.
Test: Tested the script locally.
Change-Id: I0b3bf330d340a30f14ddbe5704668e9666577d8d
|
Python
|
apache-2.0
|
androidx/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx
|
Add the helper script to import packages from Maven.
Test: Tested the script locally.
Change-Id: I0b3bf330d340a30f14ddbe5704668e9666577d8d
|
#!/usr/bin/python
"""
Copyright 2018 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import subprocess
NAME_HELP = '''
The name of the artifact you want to add to the prebuilts folder.
E.g. android.arch.work:work-runtime-ktx:1.0.0-alpha07
'''
def main():
"""Parses the command line arguments, and executes the gradle script
which downloads the maven artifacts.
"""
parser = argparse.ArgumentParser(
description='Helps download maven artifacts to prebuilts.')
parser.add_argument('-n', '--name', help=NAME_HELP,
required=True, dest='name')
parse_result = parser.parse_args()
artifact_name = parse_result.name
command = './gradlew --build-file build.gradle.kts -PartifactName=%s' % (
artifact_name)
process = subprocess.Popen(command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if stderr is not None:
lines = stderr.split('\n')
for line in lines:
print line
if stdout is not None:
lines = stdout.split('\n')
for line in lines:
print line
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add the helper script to import packages from Maven.
Test: Tested the script locally.
Change-Id: I0b3bf330d340a30f14ddbe5704668e9666577d8d<commit_after>
|
#!/usr/bin/python
"""
Copyright 2018 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import subprocess
NAME_HELP = '''
The name of the artifact you want to add to the prebuilts folder.
E.g. android.arch.work:work-runtime-ktx:1.0.0-alpha07
'''
def main():
"""Parses the command line arguments, and executes the gradle script
which downloads the maven artifacts.
"""
parser = argparse.ArgumentParser(
description='Helps download maven artifacts to prebuilts.')
parser.add_argument('-n', '--name', help=NAME_HELP,
required=True, dest='name')
parse_result = parser.parse_args()
artifact_name = parse_result.name
command = './gradlew --build-file build.gradle.kts -PartifactName=%s' % (
artifact_name)
process = subprocess.Popen(command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if stderr is not None:
lines = stderr.split('\n')
for line in lines:
print line
if stdout is not None:
lines = stdout.split('\n')
for line in lines:
print line
if __name__ == '__main__':
main()
|
Add the helper script to import packages from Maven.
Test: Tested the script locally.
Change-Id: I0b3bf330d340a30f14ddbe5704668e9666577d8d#!/usr/bin/python
"""
Copyright 2018 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import subprocess
NAME_HELP = '''
The name of the artifact you want to add to the prebuilts folder.
E.g. android.arch.work:work-runtime-ktx:1.0.0-alpha07
'''
def main():
"""Parses the command line arguments, and executes the gradle script
which downloads the maven artifacts.
"""
parser = argparse.ArgumentParser(
description='Helps download maven artifacts to prebuilts.')
parser.add_argument('-n', '--name', help=NAME_HELP,
required=True, dest='name')
parse_result = parser.parse_args()
artifact_name = parse_result.name
command = './gradlew --build-file build.gradle.kts -PartifactName=%s' % (
artifact_name)
process = subprocess.Popen(command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if stderr is not None:
lines = stderr.split('\n')
for line in lines:
print line
if stdout is not None:
lines = stdout.split('\n')
for line in lines:
print line
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add the helper script to import packages from Maven.
Test: Tested the script locally.
Change-Id: I0b3bf330d340a30f14ddbe5704668e9666577d8d<commit_after>#!/usr/bin/python
"""
Copyright 2018 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import subprocess
NAME_HELP = '''
The name of the artifact you want to add to the prebuilts folder.
E.g. android.arch.work:work-runtime-ktx:1.0.0-alpha07
'''
def main():
"""Parses the command line arguments, and executes the gradle script
which downloads the maven artifacts.
"""
parser = argparse.ArgumentParser(
description='Helps download maven artifacts to prebuilts.')
parser.add_argument('-n', '--name', help=NAME_HELP,
required=True, dest='name')
parse_result = parser.parse_args()
artifact_name = parse_result.name
command = './gradlew --build-file build.gradle.kts -PartifactName=%s' % (
artifact_name)
process = subprocess.Popen(command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = process.communicate()
if stderr is not None:
lines = stderr.split('\n')
for line in lines:
print line
if stdout is not None:
lines = stdout.split('\n')
for line in lines:
print line
if __name__ == '__main__':
main()
|
|
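The script above is Python 2 (bare print statements). Under Python 3 the pipe handling collapses into subprocess.run; a sketch, with the gradle command line copied from the script but not verified here:

import subprocess

def run_gradle(artifact_name):
    command = ('./gradlew --build-file build.gradle.kts '
               '-PartifactName=%s' % artifact_name)
    # capture_output collects stdout/stderr; text=True decodes bytes to str.
    result = subprocess.run(command, shell=True, capture_output=True, text=True)
    for stream in (result.stdout, result.stderr):
        if stream:
            print(stream, end='')
    return result.returncode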
c5296f0246c3e2cbf50ca7cad0c7f1130b0dd611
|
analysis_10x10.py
|
analysis_10x10.py
|
# -*- coding:utf-8 -*-
from pylab import *
import tables
"""
TODO
- Do all the MPS
- Do all the STS
- Do the phase shift
"""
DB = tables.openFile('db.h5')
# Get data
DATA = []
for g in DB.walkGroups():
try:
pset = g.paramset._v_attrs
res = g.results._v_attrs
except tables.NoSuchNodeError:
pass
else:
new_data_item = {}
# Interconnection rate and strength
common = pset['Common']
interco_strength = common['inter_conn_strength'][0][1]
interco_rate = common['inter_conn_rate'][0][1]
new_data_item['interco'] = {}
new_data_item['interco']['strength'] = interco_strength
new_data_item['interco']['rate'] = interco_rate
# MPS
new_data_item['MPS'] = res['MPS']
# MPS
new_data_item['STS'] = res['STS']
# Add data item
DATA.append(new_data_item)
# Plot data
# MPS
MPS = []
for simu in DATA:
rate = simu['interco']['rate']
strength = simu['interco']['strength']
MPS.append((rate, strength, simu['MPS']))
MPS.sort()
# plot MPS whole
X = linspace(0, 1, 10)
Y = linspace(0, 1, 10)
MESH = zeros((len(X), len(Y)))
for indx in xrange(len(X)):
for indy in xrange(len(Y)):
MESH[indx][indy] = MPS[indy*len(X) + indx][2]['whole']
CS = contourf(X, Y, MESH)
colorbar(CS) # TODO wrong scale...
show()
DB.close()
|
Add an analysis file for 10x10 simulations
|
Add an analysis file for 10x10 simulations
Not finished, MPS Whole figure is quite ok.
|
Python
|
mit
|
neuro-lyon/multiglom-model,neuro-lyon/multiglom-model
|
Add an analysis file for 10x10 simulations
Not finished, MPS Whole figure is quite ok.
|
# -*- coding:utf-8 -*-
from pylab import *
import tables
"""
TODO
- Do all the MPS
- Do all the STS
- Do the phase shift
"""
DB = tables.openFile('db.h5')
# Get data
DATA = []
for g in DB.walkGroups():
try:
pset = g.paramset._v_attrs
res = g.results._v_attrs
except tables.NoSuchNodeError:
pass
else:
new_data_item = {}
# Interconnection rate and strength
common = pset['Common']
interco_strength = common['inter_conn_strength'][0][1]
interco_rate = common['inter_conn_rate'][0][1]
new_data_item['interco'] = {}
new_data_item['interco']['strength'] = interco_strength
new_data_item['interco']['rate'] = interco_rate
# MPS
new_data_item['MPS'] = res['MPS']
# MPS
new_data_item['STS'] = res['STS']
# Add data item
DATA.append(new_data_item)
# Plot data
# MPS
MPS = []
for simu in DATA:
rate = simu['interco']['rate']
strength = simu['interco']['strength']
MPS.append((rate, strength, simu['MPS']))
MPS.sort()
# plot MPS whole
X = linspace(0, 1, 10)
Y = linspace(0, 1, 10)
MESH = zeros((len(X), len(Y)))
for indx in xrange(len(X)):
for indy in xrange(len(Y)):
MESH[indx][indy] = MPS[indy*len(X) + indx][2]['whole']
CS = contourf(X, Y, MESH)
colorbar(CS) # TODO wrong scale...
show()
DB.close()
|
<commit_before><commit_msg>Add an analysis file for 10x10 simulations
Not finished, MPS Whole figure is quite ok.<commit_after>
|
# -*- coding:utf-8 -*-
from pylab import *
import tables
"""
TODO
- Do all the MPS
- Do all the STS
- Do the phase shift
"""
DB = tables.openFile('db.h5')
# Get data
DATA = []
for g in DB.walkGroups():
try:
pset = g.paramset._v_attrs
res = g.results._v_attrs
except tables.NoSuchNodeError:
pass
else:
new_data_item = {}
# Interconnection rate and strength
common = pset['Common']
interco_strength = common['inter_conn_strength'][0][1]
interco_rate = common['inter_conn_rate'][0][1]
new_data_item['interco'] = {}
new_data_item['interco']['strength'] = interco_strength
new_data_item['interco']['rate'] = interco_rate
# MPS
new_data_item['MPS'] = res['MPS']
# MPS
new_data_item['STS'] = res['STS']
# Add data item
DATA.append(new_data_item)
# Plot data
# MPS
MPS = []
for simu in DATA:
rate = simu['interco']['rate']
strength = simu['interco']['strength']
MPS.append((rate, strength, simu['MPS']))
MPS.sort()
# plot MPS whole
X = linspace(0, 1, 10)
Y = linspace(0, 1, 10)
MESH = zeros((len(X), len(Y)))
for indx in xrange(len(X)):
for indy in xrange(len(Y)):
MESH[indx][indy] = MPS[indy*len(X) + indx][2]['whole']
CS = contourf(X, Y, MESH)
colorbar(CS) # TODO wrong scale...
show()
DB.close()
|
Add an analysis file for 10x10 simulations
Not finished, MPS Whole figure is quite ok.# -*- coding:utf-8 -*-
from pylab import *
import tables
"""
TODO
- Do all the MPS
- Do all the STS
- Do the phase shift
"""
DB = tables.openFile('db.h5')
# Get data
DATA = []
for g in DB.walkGroups():
try:
pset = g.paramset._v_attrs
res = g.results._v_attrs
except tables.NoSuchNodeError:
pass
else:
new_data_item = {}
# Interconnection rate and strength
common = pset['Common']
interco_strength = common['inter_conn_strength'][0][1]
interco_rate = common['inter_conn_rate'][0][1]
new_data_item['interco'] = {}
new_data_item['interco']['strength'] = interco_strength
new_data_item['interco']['rate'] = interco_rate
# MPS
new_data_item['MPS'] = res['MPS']
# MPS
new_data_item['STS'] = res['STS']
# Add data item
DATA.append(new_data_item)
# Plot data
# MPS
MPS = []
for simu in DATA:
rate = simu['interco']['rate']
strength = simu['interco']['strength']
MPS.append((rate, strength, simu['MPS']))
MPS.sort()
# plot MPS whole
X = linspace(0, 1, 10)
Y = linspace(0, 1, 10)
MESH = zeros((len(X), len(Y)))
for indx in xrange(len(X)):
for indy in xrange(len(Y)):
MESH[indx][indy] = MPS[indy*len(X) + indx][2]['whole']
CS = contourf(X, Y, MESH)
colorbar(CS) # TODO wrong scale...
show()
DB.close()
|
<commit_before><commit_msg>Add an analysis file for 10x10 simulations
Not finished, MPS Whole figure is quite ok.<commit_after># -*- coding:utf-8 -*-
from pylab import *
import tables
"""
TODO
- Do all the MPS
- Do all the STS
- Do the phase shift
"""
DB = tables.openFile('db.h5')
# Get data
DATA = []
for g in DB.walkGroups():
try:
pset = g.paramset._v_attrs
res = g.results._v_attrs
except tables.NoSuchNodeError:
pass
else:
new_data_item = {}
# Interconnection rate and strength
common = pset['Common']
interco_strength = common['inter_conn_strength'][0][1]
interco_rate = common['inter_conn_rate'][0][1]
new_data_item['interco'] = {}
new_data_item['interco']['strength'] = interco_strength
new_data_item['interco']['rate'] = interco_rate
# MPS
new_data_item['MPS'] = res['MPS']
# MPS
new_data_item['STS'] = res['STS']
# Add data item
DATA.append(new_data_item)
# Plot data
# MPS
MPS = []
for simu in DATA:
rate = simu['interco']['rate']
strength = simu['interco']['strength']
MPS.append((rate, strength, simu['MPS']))
MPS.sort()
# plot MPS whole
X = linspace(0, 1, 10)
Y = linspace(0, 1, 10)
MESH = zeros((len(X), len(Y)))
for indx in xrange(len(X)):
for indy in xrange(len(Y)):
MESH[indx][indy] = MPS[indy*len(X) + indx][2]['whole']
CS = contourf(X, Y, MESH)
colorbar(CS) # TODO wrong scale...
show()
DB.close()
|
|
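One plausible reading of the "wrong scale" TODO: matplotlib's contourf(X, Y, Z) expects Z indexed as Z[iy, ix], i.e. shape (len(Y), len(X)), while the script fills MESH[indx][indy]. On a square 10x10 grid that silently transposes the plot instead of raising. A sketch of the conventional fill order; the values are stand-ins for the MPS entries:

import numpy as np
import matplotlib.pyplot as plt

X = np.linspace(0, 1, 10)
Y = np.linspace(0, 1, 10)
Z = np.zeros((len(Y), len(X)))      # rows follow Y, columns follow X
for iy in range(len(Y)):
    for ix in range(len(X)):
        Z[iy, ix] = X[ix] * Y[iy]   # stand-in for MPS[...][2]['whole']
cs = plt.contourf(X, Y, Z)
plt.colorbar(cs)
plt.show()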
2a98676fc3de7cc8c7f335143aaecbe1f4ed5dc3
|
examples/comp/set_backgrounds_to_1920x1080_32bit.py
|
examples/comp/set_backgrounds_to_1920x1080_32bit.py
|
"""Sets all Backgrounds in the currently active comp to 1920x1080 (32 bit).
This example shows how to list tools of a specific type and set some of their
inputs. Additionally this shows off how `fusionless` is able to automatically
interpret an enum value (like "float32" for image depth) to the corresponding
float value that Fusion requires to be set internally.
"""
import fusionless as fu
import fusionless.context as fuCtx
c = fu.Comp()
with fuCtx.lock_and_undo_chunk(c, "Set all backgrounds to 1920x1080 (32 bit)"):
# Get all backgrounds in the current comp
tools = c.get_tool_list(selected=False,
node_type="Background")
for tool in tools:
tool.input("Width").set_value(1920)
tool.input("Height").set_value(1080)
# Set the depth to "float32". Note that
# fusion internally uses float value indices
# for the different values. `fusionless` will
# automatically convert enum values to their
# corresponding float value when possible.
tool.input("Depth").set_value("float32")
# So the depth would internally get set like
# tool.input("Depth").set_value(4.0)
|
Add set background resolution and bit depth example
|
Add set background resolution and bit depth example
|
Python
|
bsd-3-clause
|
BigRoy/fusionscript,BigRoy/fusionless
|
Add set background resolution and bit depth example
|
"""Sets all Backgrounds in the currently active comp to 1920x1080 (32 bit).
This example shows how to list tools of a specific type and set some of their
inputs. Additionally this shows off how `fusionless` is able to automatically
interpret an enum value (like "float32" for image depth) to the corresponding
float value that Fusion requires to be set internally.
"""
import fusionless as fu
import fusionless.context as fuCtx
c = fu.Comp()
with fuCtx.lock_and_undo_chunk(c, "Set all backgrounds to 1920x1080 (32 bit)"):
# Get all backgrounds in the current comp
tools = c.get_tool_list(selected=False,
node_type="Background")
for tool in tools:
tool.input("Width").set_value(1920)
tool.input("Height").set_value(1080)
# Set the depth to "float32". Note that
# fusion internally uses float value indices
# for the different values. `fusionless` will
# automatically convert enum values to their
# corresponding float value when possible.
tool.input("Depth").set_value("float32")
# So the depth would internally get set like
# tool.input("Depth").set_value(4.0)
|
<commit_before><commit_msg>Add set background resolution and bit depth example<commit_after>
|
"""Sets all Backgrounds in the currently active comp to 1920x1080 (32 bit).
This example shows how to list tools of a specific type and set some of their
inputs. Additionally this shows off how `fusionless` is able to automatically
interpret an enum value (like "float32" for image depth) to the corresponding
float value that Fusion requires to be set internally.
"""
import fusionless as fu
import fusionless.context as fuCtx
c = fu.Comp()
with fuCtx.lock_and_undo_chunk(c, "Set all backgrounds to 1920x1080 (32 bit)"):
# Get all backgrounds in the current comp
tools = c.get_tool_list(selected=False,
node_type="Background")
for tool in tools:
tool.input("Width").set_value(1920)
tool.input("Height").set_value(1080)
# Set the depth to "float32". Note that
# fusion internally uses float value indices
# for the different values. `fusionless` will
# automatically convert enum values to their
# corresponding float value when possible.
tool.input("Depth").set_value("float32")
# So the depth would internally get set like
# tool.input("Depth").set_value(4.0)
|
Add set background resolution and bit depth example"""Sets all Backgrounds in the currently active comp to 1920x1080 (32 bit).
This example shows how to list tools of a specific type and set some of their
inputs. Additionally this shows off how `fusionless` is able to automatically
interpret an enum value (like "float32" for image depth) to the corresponding
float value that Fusion requires to be set internally.
"""
import fusionless as fu
import fusionless.context as fuCtx
c = fu.Comp()
with fuCtx.lock_and_undo_chunk(c, "Set all backgrounds to 1920x1080 (32 bit)"):
# Get all backgrounds in the current comp
tools = c.get_tool_list(selected=False,
node_type="Background")
for tool in tools:
tool.input("Width").set_value(1920)
tool.input("Height").set_value(1080)
# Set the depth to "float32". Note that
# fusion internally uses float value indices
# for the different values. `fusionless` will
# automatically convert enum values to their
# corresponding float value when possible.
tool.input("Depth").set_value("float32")
# So the depth would internally get set like
# tool.input("Depth").set_value(4.0)
|
<commit_before><commit_msg>Add set background resolution and bit depth example<commit_after>"""Sets all Backgrounds in the currently active comp to 1920x1080 (32 bit).
This example shows how to list tools of a specific type and set some of their
inputs. Additionally this shows off how `fusionless` is able to automatically
interpret an enum value (like "float32" for image depth) to the corresponding
float value that Fusion requires to be set internally.
"""
import fusionless as fu
import fusionless.context as fuCtx
c = fu.Comp()
with fuCtx.lock_and_undo_chunk(c, "Set all backgrounds to 1920x1080 (32 bit)"):
# Get all backgrounds in the current comp
tools = c.get_tool_list(selected=False,
node_type="Background")
for tool in tools:
tool.input("Width").set_value(1920)
tool.input("Height").set_value(1080)
# Set the depth to "float32". Note that
# fusion internally uses float value indices
# for the different values. `fusionless` will
# automatically convert enum values to their
# corresponding float value when possible.
tool.input("Depth").set_value("float32")
# So the depth would internally get set like
# tool.input("Depth").set_value(4.0)
|
|
44452f46ac991307755a778d9057b30c80e74078
|
gem/management/commands/add_images_to_articles.py
|
gem/management/commands/add_images_to_articles.py
|
from __future__ import absolute_import, unicode_literals
import csv
from babel import Locale
from django.core.management.base import BaseCommand
from wagtail.wagtailimages.tests.utils import Image
from molo.core.models import (
Languages, Tag, ArticlePage, ArticlePageTags, Main, SectionIndexPage,
TagIndexPage)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('csv_name', type=str)
def handle(self, *args, **options):
csv_name = options.get('csv_name', None)
mains = Main.objects.all()
articles = {}
with open(csv_name) as articles_tags:
reader = csv.reader(articles_tags)
if mains:
for row in reader:
key = row[0]
articles[key] = row[1:]
for main in mains:
section_index = SectionIndexPage.objects.child_of(main).first()
main_lang = Languages.for_site(main.get_site()).languages.filter(
is_active=True, is_main_language=True).first()
if section_index and main_lang:
if main_lang.locale == 'en':
for article_slug in articles:
article = ArticlePage.objects.descendant_of(
section_index).filter(slug=article_slug).first()
if article:
for image_title in articles.get(article_slug):
image = Image.objects.filter(
title=image_title).first()
if image:
article.image = image
article.save_revision().publish()
else:
self.stdout.write(self.style.NOTICE(
'Image "%s" does not exist in "%s"'
% (image_title, main)))
else:
self.stdout.write(self.style.ERROR(
'Article "%s" does not exist in "%s"'
% (article_slug, main.get_site())))
else:
self.stdout.write(self.style.NOTICE(
'Main language of "%s" is not English.'
' The main language is "%s"'
% (main.get_site(), main_lang)))
else:
if not section_index:
self.stdout.write(self.style.NOTICE(
'Section Index Page does not exist in "%s"' % main))
if not main_lang:
self.stdout.write(self.style.NOTICE(
'Main language does not exist in "%s"' % main))
|
Add management command for adding images to articles
|
Add management command for adding images to articles
|
Python
|
bsd-2-clause
|
praekelt/molo-gem,praekelt/molo-gem,praekelt/molo-gem
|
Add management command for adding images to articles
|
from __future__ import absolute_import, unicode_literals
import csv
from babel import Locale
from django.core.management.base import BaseCommand
from wagtail.wagtailimages.tests.utils import Image
from molo.core.models import (
Languages, Tag, ArticlePage, ArticlePageTags, Main, SectionIndexPage,
TagIndexPage)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('csv_name', type=str)
def handle(self, *args, **options):
csv_name = options.get('csv_name', None)
mains = Main.objects.all()
articles = {}
with open(csv_name) as articles_tags:
reader = csv.reader(articles_tags)
if mains:
for row in reader:
key = row[0]
articles[key] = row[1:]
for main in mains:
section_index = SectionIndexPage.objects.child_of(main).first()
main_lang = Languages.for_site(main.get_site()).languages.filter(
is_active=True, is_main_language=True).first()
if section_index and main_lang:
if main_lang.locale == 'en':
for article_slug in articles:
article = ArticlePage.objects.descendant_of(
section_index).filter(slug=article_slug).first()
if article:
for image_title in articles.get(article_slug):
image = Image.objects.filter(
title=image_title).first()
if image:
article.image = image
article.save_revision().publish()
else:
self.stdout.write(self.style.NOTICE(
'Image "%s" does not exist in "%s"'
% (image_title, main)))
else:
self.stdout.write(self.style.ERROR(
'Article "%s" does not exist in "%s"'
% (article_slug, main.get_site())))
else:
self.stdout.write(self.style.NOTICE(
'Main language of "%s" is not English.'
' The main language is "%s"'
% (main.get_site(), main_lang)))
else:
if not section_index:
self.stdout.write(self.style.NOTICE(
'Section Index Page does not exist in "%s"' % main))
if not main_lang:
self.stdout.write(self.style.NOTICE(
'Main language does not exist in "%s"' % main))
|
<commit_before><commit_msg>Add management command for adding images to articles<commit_after>
|
from __future__ import absolute_import, unicode_literals
import csv
from babel import Locale
from django.core.management.base import BaseCommand
from wagtail.wagtailimages.tests.utils import Image
from molo.core.models import (
Languages, Tag, ArticlePage, ArticlePageTags, Main, SectionIndexPage,
TagIndexPage)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('csv_name', type=str)
def handle(self, *args, **options):
csv_name = options.get('csv_name', None)
mains = Main.objects.all()
articles = {}
with open(csv_name) as articles_tags:
reader = csv.reader(articles_tags)
if mains:
for row in reader:
key = row[0]
articles[key] = row[1:]
for main in mains:
section_index = SectionIndexPage.objects.child_of(main).first()
main_lang = Languages.for_site(main.get_site()).languages.filter(
is_active=True, is_main_language=True).first()
if section_index and main_lang:
if main_lang.locale == 'en':
for article_slug in articles:
article = ArticlePage.objects.descendant_of(
section_index).filter(slug=article_slug).first()
if article:
for image_title in articles.get(article_slug):
image = Image.objects.filter(
title=image_title).first()
if image:
article.image = image
article.save_revision().publish()
else:
self.stdout.write(self.style.NOTICE(
'Image "%s" does not exist in "%s"'
% (image_title, main)))
else:
self.stdout.write(self.style.ERROR(
'Article "%s" does not exist in "%s"'
% (article_slug, main.get_site())))
else:
self.stdout.write(self.style.NOTICE(
'Main language of "%s" is not English.'
' The main language is "%s"'
% (main.get_site(), main_lang)))
else:
if not section_index:
self.stdout.write(self.style.NOTICE(
'Section Index Page does not exist in "%s"' % main))
if not main_lang:
self.stdout.write(self.style.NOTICE(
'Main language does not exist in "%s"' % main))
|
Add management command for adding images to articlesfrom __future__ import absolute_import, unicode_literals
import csv
from babel import Locale
from django.core.management.base import BaseCommand
from wagtail.wagtailimages.tests.utils import Image
from molo.core.models import (
Languages, Tag, ArticlePage, ArticlePageTags, Main, SectionIndexPage,
TagIndexPage)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('csv_name', type=str)
def handle(self, *args, **options):
csv_name = options.get('csv_name', None)
mains = Main.objects.all()
articles = {}
with open(csv_name) as articles_tags:
reader = csv.reader(articles_tags)
if mains:
for row in reader:
key = row[0]
articles[key] = row[1:]
for main in mains:
section_index = SectionIndexPage.objects.child_of(main).first()
main_lang = Languages.for_site(main.get_site()).languages.filter(
is_active=True, is_main_language=True).first()
if section_index and main_lang:
if main_lang.locale == 'en':
for article_slug in articles:
article = ArticlePage.objects.descendant_of(
section_index).filter(slug=article_slug).first()
if article:
for image_title in articles.get(article_slug):
image = Image.objects.filter(
title=image_title).first()
if image:
article.image = image
article.save_revision().publish()
else:
self.stdout.write(self.style.NOTICE(
'Image "%s" does not exist in "%s"'
% (image_title, main)))
else:
self.stdout.write(self.style.ERROR(
'Article "%s" does not exist in "%s"'
% (article_slug, main.get_site())))
else:
self.stdout.write(self.style.NOTICE(
'Main language of "%s" is not English.'
' The main language is "%s"'
% (main.get_site(), main_lang)))
else:
if not section_index:
self.stdout.write(self.style.NOTICE(
'Section Index Page does not exist in "%s"' % main))
if not main_lang:
self.stdout.write(self.style.NOTICE(
'Main language does not exist in "%s"' % main))
|
<commit_before><commit_msg>Add management command for adding images to articles<commit_after>from __future__ import absolute_import, unicode_literals
import csv
from babel import Locale
from django.core.management.base import BaseCommand
from wagtail.wagtailimages.tests.utils import Image
from molo.core.models import (
Languages, Tag, ArticlePage, ArticlePageTags, Main, SectionIndexPage,
TagIndexPage)
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('csv_name', type=str)
def handle(self, *args, **options):
csv_name = options.get('csv_name', None)
mains = Main.objects.all()
articles = {}
with open(csv_name) as articles_tags:
reader = csv.reader(articles_tags)
if mains:
for row in reader:
key = row[0]
articles[key] = row[1:]
for main in mains:
section_index = SectionIndexPage.objects.child_of(main).first()
main_lang = Languages.for_site(main.get_site()).languages.filter(
is_active=True, is_main_language=True).first()
if section_index and main_lang:
if main_lang.locale == 'en':
for article_slug in articles:
article = ArticlePage.objects.descendant_of(
section_index).filter(slug=article_slug).first()
if article:
for image_title in articles.get(article_slug):
image = Image.objects.filter(
title=image_title).first()
if image:
article.image = image
article.save_revision().publish()
else:
self.stdout.write(self.style.NOTICE(
'Image "%s" does not exist in "%s"'
% (image_title, main)))
else:
self.stdout.write(self.style.ERROR(
'Article "%s" does not exist in "%s"'
% (article_slug, main.get_site())))
else:
self.stdout.write(self.style.NOTICE(
'Main language of "%s" is not English.'
' The main language is "%s"'
% (main.get_site(), main_lang)))
else:
if not section_index:
self.stdout.write(self.style.NOTICE(
'Section Index Page does not exist in "%s"' % main))
if not main_lang:
self.stdout.write(self.style.NOTICE(
'Main language does not exist in "%s"' % main))
|
|
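A usage sketch, assuming standard Django management-command discovery (the command name comes from the module file name) and the CSV layout implied by the code: first column an article slug, remaining columns image titles. The file name and content below are illustrative:

# articles.csv (illustrative):
#   my-first-article,Hero image,Thumbnail
#   another-article,Banner photo
#
# Invocation from the project root:
#   python manage.py add_images_to_articles articles.csv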
cddc7881b40ae9f9acdc42102185cf7930ccd73b
|
astropy/modeling/tests/test_functional_models.py
|
astropy/modeling/tests/test_functional_models.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from numpy.testing import assert_allclose
from .. import models
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
def test_Trapezoid1DModel():
"""Regression test for
https://github.com/astropy/astropy/issues/1721
"""
model = models.Trapezoid1DModel(amplitude=4.2, x_0=2.0, width=1.0, slope=3)
xx = np.linspace(0, 4, 8)
yy = model(xx)
yy_ref = [0., 1.41428571, 3.12857143, 4.2, 4.2, 3.12857143, 1.41428571, 0.]
assert_allclose(yy, yy_ref, rtol=0, atol=1e-6)
|
Add test for Trapezoid1D model
|
Add test for Trapezoid1D model
|
Python
|
bsd-3-clause
|
AustereCuriosity/astropy,astropy/astropy,pllim/astropy,mhvk/astropy,larrybradley/astropy,tbabej/astropy,StuartLittlefair/astropy,astropy/astropy,aleksandr-bakanov/astropy,stargaser/astropy,pllim/astropy,joergdietrich/astropy,astropy/astropy,lpsinger/astropy,lpsinger/astropy,mhvk/astropy,saimn/astropy,kelle/astropy,larrybradley/astropy,funbaker/astropy,mhvk/astropy,dhomeier/astropy,lpsinger/astropy,lpsinger/astropy,pllim/astropy,DougBurke/astropy,DougBurke/astropy,StuartLittlefair/astropy,MSeifert04/astropy,mhvk/astropy,funbaker/astropy,astropy/astropy,funbaker/astropy,tbabej/astropy,joergdietrich/astropy,dhomeier/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,aleksandr-bakanov/astropy,pllim/astropy,joergdietrich/astropy,saimn/astropy,stargaser/astropy,MSeifert04/astropy,bsipocz/astropy,pllim/astropy,dhomeier/astropy,larrybradley/astropy,saimn/astropy,joergdietrich/astropy,bsipocz/astropy,larrybradley/astropy,kelle/astropy,kelle/astropy,lpsinger/astropy,MSeifert04/astropy,joergdietrich/astropy,tbabej/astropy,stargaser/astropy,tbabej/astropy,dhomeier/astropy,mhvk/astropy,stargaser/astropy,saimn/astropy,StuartLittlefair/astropy,dhomeier/astropy,AustereCuriosity/astropy,saimn/astropy,StuartLittlefair/astropy,funbaker/astropy,StuartLittlefair/astropy,kelle/astropy,MSeifert04/astropy,tbabej/astropy,bsipocz/astropy,AustereCuriosity/astropy,astropy/astropy,AustereCuriosity/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,kelle/astropy,bsipocz/astropy,DougBurke/astropy,AustereCuriosity/astropy
|
Add test for Trapezoid1D model
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from numpy.testing import assert_allclose
from .. import models
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
def test_Trapezoid1DModel():
"""Regression test for
https://github.com/astropy/astropy/issues/1721
"""
model = models.Trapezoid1DModel(amplitude=4.2, x_0=2.0, width=1.0, slope=3)
xx = np.linspace(0, 4, 8)
yy = model(xx)
yy_ref = [0., 1.41428571, 3.12857143, 4.2, 4.2, 3.12857143, 1.41428571, 0.]
assert_allclose(yy, yy_ref, rtol=0, atol=1e-6)
|
<commit_before><commit_msg>Add test for Trapezoid1D model<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from numpy.testing import assert_allclose
from .. import models
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
def test_Trapezoid1DModel():
"""Regression test for
https://github.com/astropy/astropy/issues/1721
"""
model = models.Trapezoid1DModel(amplitude=4.2, x_0=2.0, width=1.0, slope=3)
xx = np.linspace(0, 4, 8)
yy = model(xx)
yy_ref = [0., 1.41428571, 3.12857143, 4.2, 4.2, 3.12857143, 1.41428571, 0.]
assert_allclose(yy, yy_ref, rtol=0, atol=1e-6)
|
Add test for Trapezoid1D model# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from numpy.testing import assert_allclose
from .. import models
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
def test_Trapezoid1DModel():
"""Regression test for
https://github.com/astropy/astropy/issues/1721
"""
model = models.Trapezoid1DModel(amplitude=4.2, x_0=2.0, width=1.0, slope=3)
xx = np.linspace(0, 4, 8)
yy = model(xx)
yy_ref = [0., 1.41428571, 3.12857143, 4.2, 4.2, 3.12857143, 1.41428571, 0.]
assert_allclose(yy, yy_ref, rtol=0, atol=1e-6)
|
<commit_before><commit_msg>Add test for Trapezoid1D model<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from numpy.testing import assert_allclose
from .. import models
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
def test_Trapezoid1DModel():
"""Regression test for
https://github.com/astropy/astropy/issues/1721
"""
model = models.Trapezoid1DModel(amplitude=4.2, x_0=2.0, width=1.0, slope=3)
xx = np.linspace(0, 4, 8)
yy = model(xx)
yy_ref = [0., 1.41428571, 3.12857143, 4.2, 4.2, 3.12857143, 1.41428571, 0.]
assert_allclose(yy, yy_ref, rtol=0, atol=1e-6)
|
|
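For readers checking the reference values: they follow from a trapezoid with a flat top of half-width width/2 around x_0 and sides of the given slope, clipped at zero. A quick sketch reproducing yy_ref; the closed form is reconstructed from the expected numbers, not taken from astropy's source:

import numpy as np

amplitude, x_0, width, slope = 4.2, 2.0, 1.0, 3.0
xx = np.linspace(0, 4, 8)
yy = np.clip(amplitude - slope * np.maximum(np.abs(xx - x_0) - width / 2, 0),
             0, None)
print(yy)  # matches yy_ref above to ~1e-6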
f1e1cc28ff00ed3b54d6eb1f4a77290c8ee7f7b2
|
demo/guide-python/sklearn_evals_result.py
|
demo/guide-python/sklearn_evals_result.py
|
##
# This script demonstrates how to access the xgboost eval metrics by using sklearn
##
import xgboost as xgb
import numpy as np
from sklearn.datasets import make_hastie_10_2
X, y = make_hastie_10_2(n_samples=2000, random_state=42)
# Map labels from {-1, 1} to {0, 1}
labels, y = np.unique(y, return_inverse=True)
X_train, X_test = X[:1600], X[1600:]
y_train, y_test = y[:1600], y[1600:]
param_dist = {'objective':'binary:logistic', 'n_estimators':2}
clf = xgb.XGBModel(**param_dist)
# Or you can use: clf = xgb.XGBClassifier(**param_dist)
clf.fit(X_train, y_train,
eval_set=[(X_train, y_train), (X_test, y_test)],
eval_metric='logloss',
verbose=True)
# Load evals result by calling the evals_result() function
evals_result = clf.evals_result()
print('Access logloss metric directly from validation_0:')
print(evals_result['validation_0']['logloss'])
print('')
print('Access metrics through a loop:')
for e_name, e_mtrs in evals_result.items():
print('- {}'.format(e_name))
for e_mtr_name, e_mtr_vals in e_mtrs.items():
print(' - {}'.format(e_mtr_name))
print(' - {}'.format(e_mtr_vals))
print('')
print('Access complete dict:')
print(evals_result)
|
Access xgboost eval metrics by using sklearn
|
Access xgboost eval metrics by using sklearn
|
Python
|
apache-2.0
|
dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost,dmlc/xgboost
|
Access xgboost eval metrics by using sklearn
|
##
# This script demonstrates how to access the xgboost eval metrics by using sklearn
##
import xgboost as xgb
import numpy as np
from sklearn.datasets import make_hastie_10_2
X, y = make_hastie_10_2(n_samples=2000, random_state=42)
# Map labels from {-1, 1} to {0, 1}
labels, y = np.unique(y, return_inverse=True)
X_train, X_test = X[:1600], X[1600:]
y_train, y_test = y[:1600], y[1600:]
param_dist = {'objective':'binary:logistic', 'n_estimators':2}
clf = xgb.XGBModel(**param_dist)
# Or you can use: clf = xgb.XGBClassifier(**param_dist)
clf.fit(X_train, y_train,
eval_set=[(X_train, y_train), (X_test, y_test)],
eval_metric='logloss',
verbose=True)
# Load evals result by calling the evals_result() function
evals_result = clf.evals_result()
print('Access logloss metric directly from validation_0:')
print(evals_result['validation_0']['logloss'])
print('')
print('Access metrics through a loop:')
for e_name, e_mtrs in evals_result.items():
print('- {}'.format(e_name))
for e_mtr_name, e_mtr_vals in e_mtrs.items():
print(' - {}'.format(e_mtr_name))
print(' - {}'.format(e_mtr_vals))
print('')
print('Access complete dict:')
print(evals_result)
|
<commit_before><commit_msg>Access xgboost eval metrics by using sklearn<commit_after>
|
##
# This script demonstrates how to access the xgboost eval metrics by using sklearn
##
import xgboost as xgb
import numpy as np
from sklearn.datasets import make_hastie_10_2
X, y = make_hastie_10_2(n_samples=2000, random_state=42)
# Map labels from {-1, 1} to {0, 1}
labels, y = np.unique(y, return_inverse=True)
X_train, X_test = X[:1600], X[1600:]
y_train, y_test = y[:1600], y[1600:]
param_dist = {'objective':'binary:logistic', 'n_estimators':2}
clf = xgb.XGBModel(**param_dist)
# Or you can use: clf = xgb.XGBClassifier(**param_dist)
clf.fit(X_train, y_train,
eval_set=[(X_train, y_train), (X_test, y_test)],
eval_metric='logloss',
verbose=True)
# Load evals result by calling the evals_result() function
evals_result = clf.evals_result()
print('Access logloss metric directly from validation_0:')
print(evals_result['validation_0']['logloss'])
print('')
print('Access metrics through a loop:')
for e_name, e_mtrs in evals_result.items():
print('- {}'.format(e_name))
for e_mtr_name, e_mtr_vals in e_mtrs.items():
print(' - {}'.format(e_mtr_name))
print(' - {}'.format(e_mtr_vals))
print('')
print('Access complete dict:')
print(evals_result)
|
Access xgboost eval metrics by using sklearn##
# This script demonstrates how to access the xgboost eval metrics by using sklearn
##
import xgboost as xgb
import numpy as np
from sklearn.datasets import make_hastie_10_2
X, y = make_hastie_10_2(n_samples=2000, random_state=42)
# Map labels from {-1, 1} to {0, 1}
labels, y = np.unique(y, return_inverse=True)
X_train, X_test = X[:1600], X[1600:]
y_train, y_test = y[:1600], y[1600:]
param_dist = {'objective':'binary:logistic', 'n_estimators':2}
clf = xgb.XGBModel(**param_dist)
# Or you can use: clf = xgb.XGBClassifier(**param_dist)
clf.fit(X_train, y_train,
eval_set=[(X_train, y_train), (X_test, y_test)],
eval_metric='logloss',
verbose=True)
# Load evals result by calling the evals_result() function
evals_result = clf.evals_result()
print('Access logloss metric directly from validation_0:')
print(evals_result['validation_0']['logloss'])
print('')
print('Access metrics through a loop:')
for e_name, e_mtrs in evals_result.items():
print('- {}'.format(e_name))
for e_mtr_name, e_mtr_vals in e_mtrs.items():
print(' - {}'.format(e_mtr_name))
print(' - {}'.format(e_mtr_vals))
print('')
print('Access complete dict:')
print(evals_result)
|
<commit_before><commit_msg>Access xgboost eval metrics by using sklearn<commit_after>##
# This script demonstrates how to access the xgboost eval metrics by using sklearn
##
import xgboost as xgb
import numpy as np
from sklearn.datasets import make_hastie_10_2
X, y = make_hastie_10_2(n_samples=2000, random_state=42)
# Map labels from {-1, 1} to {0, 1}
labels, y = np.unique(y, return_inverse=True)
X_train, X_test = X[:1600], X[1600:]
y_train, y_test = y[:1600], y[1600:]
param_dist = {'objective':'binary:logistic', 'n_estimators':2}
clf = xgb.XGBModel(**param_dist)
# Or you can use: clf = xgb.XGBClassifier(**param_dist)
clf.fit(X_train, y_train,
eval_set=[(X_train, y_train), (X_test, y_test)],
eval_metric='logloss',
verbose=True)
# Load evals result by calling the evals_result() function
evals_result = clf.evals_result()
print('Access logloss metric directly from validation_0:')
print(evals_result['validation_0']['logloss'])
print('')
print('Access metrics through a loop:')
for e_name, e_mtrs in evals_result.items():
print('- {}'.format(e_name))
for e_mtr_name, e_mtr_vals in e_mtrs.items():
print(' - {}'.format(e_mtr_name))
print(' - {}'.format(e_mtr_vals))
print('')
print('Access complete dict:')
print(evals_result)
|
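A natural follow-up to the listing above is to plot the recorded learning curves; this sketch assumes matplotlib is installed and that evals_result is the dict produced by the script:

import matplotlib.pyplot as plt

rounds = range(len(evals_result['validation_0']['logloss']))
for name, metrics in evals_result.items():
    plt.plot(rounds, metrics['logloss'], label=name)  # one curve per eval set
plt.xlabel('boosting round')
plt.ylabel('logloss')
plt.legend()
plt.show()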
|
7ab7e276654860a7e79fda95313fffa82f4e8fee
|
integration-test/473-landuse-tier.py
|
integration-test/473-landuse-tier.py
|
# http://www.openstreetmap.org/way/167274589
# area 300363008
assert_has_feature(
16, 10818, 21900, 'landuse',
{ 'kind': 'national_park', 'id': 167274589, 'tier': 1,
'min_zoom': 3 })
# http://www.openstreetmap.org/relation/921675
# area 30089300
assert_has_feature(
16, 14579, 29651, 'landuse',
{ 'kind': 'national_park', 'id': -921675, 'tier': 1,
'min_zoom': 8 })
assert_no_matching_feature(
7, 28, 57, 'landuse',
{ 'kind': 'national_park', 'id': -921675 })
# this is USFS, so demoted to tier 2 :-(
# http://www.openstreetmap.org/way/34416231
# area 86685400
assert_has_feature(
16, 18270, 25157, 'landuse',
{ 'kind': 'national_park', 'id': 34416231,
'tier': 2, 'min_zoom': 8 })
|
Add test for landuse tiers.
|
Add test for landuse tiers.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
Add test for landuse tiers.
|
# http://www.openstreetmap.org/way/167274589
# area 300363008
assert_has_feature(
16, 10818, 21900, 'landuse',
{ 'kind': 'national_park', 'id': 167274589, 'tier': 1,
'min_zoom': 3 })
# http://www.openstreetmap.org/relation/921675
# area 30089300
assert_has_feature(
16, 14579, 29651, 'landuse',
{ 'kind': 'national_park', 'id': -921675, 'tier': 1,
'min_zoom': 8 })
assert_no_matching_feature(
7, 28, 57, 'landuse',
{ 'kind': 'national_park', 'id': -921675 })
# this is USFS, so demoted to tier 2 :-(
# http://www.openstreetmap.org/way/34416231
# area 86685400
assert_has_feature(
16, 18270, 25157, 'landuse',
{ 'kind': 'national_park', 'id': 34416231,
'tier': 2, 'min_zoom': 8 })
|
<commit_before><commit_msg>Add test for landuse tiers.<commit_after>
|
# http://www.openstreetmap.org/way/167274589
# area 300363008
assert_has_feature(
16, 10818, 21900, 'landuse',
{ 'kind': 'national_park', 'id': 167274589, 'tier': 1,
'min_zoom': 3 })
# http://www.openstreetmap.org/relation/921675
# area 30089300
assert_has_feature(
16, 14579, 29651, 'landuse',
{ 'kind': 'national_park', 'id': -921675, 'tier': 1,
'min_zoom': 8 })
assert_no_matching_feature(
7, 28, 57, 'landuse',
{ 'kind': 'national_park', 'id': -921675 })
# this is USFS, so demoted to tier 2 :-(
# http://www.openstreetmap.org/way/34416231
# area 86685400
assert_has_feature(
16, 18270, 25157, 'landuse',
{ 'kind': 'national_park', 'id': 34416231,
'tier': 2, 'min_zoom': 8 })
|
Add test for landuse tiers.# http://www.openstreetmap.org/way/167274589
# area 300363008
assert_has_feature(
16, 10818, 21900, 'landuse',
{ 'kind': 'national_park', 'id': 167274589, 'tier': 1,
'min_zoom': 3 })
# http://www.openstreetmap.org/relation/921675
# area 30089300
assert_has_feature(
16, 14579, 29651, 'landuse',
{ 'kind': 'national_park', 'id': -921675, 'tier': 1,
'min_zoom': 8 })
assert_no_matching_feature(
7, 28, 57, 'landuse',
{ 'kind': 'national_park', 'id': -921675 })
# this is USFS, so demoted to tier 2 :-(
# http://www.openstreetmap.org/way/34416231
# area 86685400
assert_has_feature(
16, 18270, 25157, 'landuse',
{ 'kind': 'national_park', 'id': 34416231,
'tier': 2, 'min_zoom': 8 })
|
<commit_before><commit_msg>Add test for landuse tiers.<commit_after># http://www.openstreetmap.org/way/167274589
# area 300363008
assert_has_feature(
16, 10818, 21900, 'landuse',
{ 'kind': 'national_park', 'id': 167274589, 'tier': 1,
'min_zoom': 3 })
# http://www.openstreetmap.org/relation/921675
# area 30089300
assert_has_feature(
16, 14579, 29651, 'landuse',
{ 'kind': 'national_park', 'id': -921675, 'tier': 1,
'min_zoom': 8 })
assert_no_matching_feature(
7, 28, 57, 'landuse',
{ 'kind': 'national_park', 'id': -921675 })
# this is USFS, so demoted to tier 2 :-(
# http://www.openstreetmap.org/way/34416231
# area 86685400
assert_has_feature(
16, 18270, 25157, 'landuse',
{ 'kind': 'national_park', 'id': 34416231,
'tier': 2, 'min_zoom': 8 })
|
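The assert_has_feature helper used above lives in the suite's shared harness; a minimal sketch of what such a helper could look like follows (the tile URL and the per-layer GeoJSON response shape are assumptions, not the project's actual endpoint):

import json
from urllib.request import urlopen

TILE_URL = 'http://tiles.example.com/osm/all/{}/{}/{}.json'  # hypothetical endpoint

def assert_has_feature(z, x, y, layer, properties):
    # Fetch the tile and require one feature whose properties contain
    # every expected key/value pair.
    tile = json.load(urlopen(TILE_URL.format(z, x, y)))
    for feature in tile[layer]['features']:
        props = feature['properties']
        if all(props.get(k) == v for k, v in properties.items()):
            return
    raise AssertionError('no {!r} feature in {}/{}/{}'.format(properties, z, x, y))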
|
02f4211fa09e8bd9f4a54d296e1f9bc7dadd5452
|
dynd/tests/test_nd_groupby.py
|
dynd/tests/test_nd_groupby.py
|
import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.ndobject([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
udtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
if __name__ == '__main__':
unittest.main()
|
Add test of groupby with immutable 'by'
|
Add test of groupby with immutable 'by'
|
Python
|
bsd-2-clause
|
pombredanne/dynd-python,mwiebe/dynd-python,izaid/dynd-python,aterrel/dynd-python,pombredanne/dynd-python,michaelpacer/dynd-python,aterrel/dynd-python,izaid/dynd-python,mwiebe/dynd-python,aterrel/dynd-python,insertinterestingnamehere/dynd-python,ContinuumIO/dynd-python,insertinterestingnamehere/dynd-python,cpcloud/dynd-python,izaid/dynd-python,ContinuumIO/dynd-python,mwiebe/dynd-python,cpcloud/dynd-python,mwiebe/dynd-python,izaid/dynd-python,michaelpacer/dynd-python,insertinterestingnamehere/dynd-python,ContinuumIO/dynd-python,michaelpacer/dynd-python,insertinterestingnamehere/dynd-python,aterrel/dynd-python,ContinuumIO/dynd-python,michaelpacer/dynd-python,pombredanne/dynd-python,cpcloud/dynd-python,cpcloud/dynd-python,pombredanne/dynd-python
|
Add test of groupby with immutable 'by'
|
import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.ndobject([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
udtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test of groupby with immutable 'by'<commit_after>
|
import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.ndobject([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
udtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
if __name__ == '__main__':
unittest.main()
|
Add test of groupby with immutable 'by'import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.ndobject([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
udtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test of groupby with immutable 'by'<commit_after>import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.ndobject([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
udtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
if __name__ == '__main__':
unittest.main()
|
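For readers unfamiliar with dynd, the grouping the test expects mirrors what pandas produces for the same data; this equivalent is illustrative only and is not part of dynd:

import pandas as pd

df = pd.DataFrame({'A': ['x', 'y', 'x', 'x', 'y'], 'B': [0, 1, 2, 3, 4]})
for key, group in df.groupby('A'):
    print(key, group.to_dict('records'))
# x [{'A': 'x', 'B': 0}, {'A': 'x', 'B': 2}, {'A': 'x', 'B': 3}]
# y [{'A': 'y', 'B': 1}, {'A': 'y', 'B': 4}]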
|
9d9d46589852cb67b1cee3810d04ea88f8775f06
|
kboard/board/templatetags/hide_ip.py
|
kboard/board/templatetags/hide_ip.py
|
import re
from django import template
register = template.Library()
@register.simple_tag
def hide_ip(ip):
    m = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(ip))
if m is not None:
ip_arr = str(ip).split('.')
ip_arr[2] = 'xxx'
return '.'.join(ip_arr)
else:
return str(ip)
|
Add hide ip template tag
|
Add hide ip template tag
|
Python
|
mit
|
cjh5414/kboard,kboard/kboard,guswnsxodlf/k-board,cjh5414/kboard,hyesun03/k-board,guswnsxodlf/k-board,cjh5414/kboard,kboard/kboard,kboard/kboard,hyesun03/k-board,hyesun03/k-board,darjeeling/k-board,guswnsxodlf/k-board
|
Add hide ip template tag
|
import re
from django import template
register = template.Library()
@register.simple_tag
def hide_ip(ip):
    m = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(ip))
if m is not None:
ip_arr = str(ip).split('.')
ip_arr[2] = 'xxx'
return '.'.join(ip_arr)
else:
return str(ip)
|
<commit_before><commit_msg>Add hide ip template tag<commit_after>
|
import re
from django import template
register = template.Library()
@register.simple_tag
def hide_ip(ip):
    m = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(ip))
if m is not None:
ip_arr = str(ip).split('.')
ip_arr[2] = 'xxx'
return '.'.join(ip_arr)
else:
return str(ip)
|
Add hide ip template tagimport re
from django import template
register = template.Library()
@register.simple_tag
def hide_ip(ip):
    m = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(ip))
if m is not None:
ip_arr = str(ip).split('.')
ip_arr[2] = 'xxx'
return '.'.join(ip_arr)
else:
return str(ip)
|
<commit_before><commit_msg>Add hide ip template tag<commit_after>import re
from django import template
register = template.Library()
@register.simple_tag
def hide_ip(ip):
    m = re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', str(ip))
if m is not None:
ip_arr = str(ip).split('.')
ip_arr[2] = 'xxx'
return '.'.join(ip_arr)
else:
return str(ip)
|
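In a template the tag would be used as {% load hide_ip %} followed by something like {% hide_ip comment.ip %} (the comment.ip variable is illustrative). The function can also be exercised directly; note that non-IPv4 input falls through unchanged:

>>> from board.templatetags.hide_ip import hide_ip
>>> hide_ip('192.168.123.45')
'192.168.xxx.45'
>>> hide_ip('2001:db8::1')
'2001:db8::1'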
|
5d8cc197251ec4d19028e36ea400d4d6354d09ad
|
pythran/tests/cases/rosen.py
|
pythran/tests/cases/rosen.py
|
import numpy as np
#runas import numpy as np; r = np.arange(1000000); rosen(r)
#pythran export rosen(float[])
def rosen(x):
t0 = 100 * (x[1:] - x[:-1] ** 2) ** 2
t1 = (1 - x[:-1]) ** 2
return np.sum(t0 + t1)
|
Add good code for lazy analysis
|
Add good code for lazy analysis
|
Python
|
bsd-3-clause
|
serge-sans-paille/pythran,pbrunet/pythran,pbrunet/pythran,pombredanne/pythran,pombredanne/pythran,hainm/pythran,serge-sans-paille/pythran,artas360/pythran,artas360/pythran,artas360/pythran,pbrunet/pythran,pombredanne/pythran,hainm/pythran,hainm/pythran
|
Add good code for lazy analysis
|
import numpy as np
#runas import numpy as np; r = np.arange(1000000); rosen(r)
#pythran export rosen(float[])
def rosen(x):
t0 = 100 * (x[1:] - x[:-1] ** 2) ** 2
t1 = (1 - x[:-1]) ** 2
return np.sum(t0 + t1)
|
<commit_before><commit_msg>Add good code for lazy analysis<commit_after>
|
import numpy as np
#runas import numpy as np; r = np.arange(1000000); rosen(r)
#pythran export rosen(float[])
def rosen(x):
t0 = 100 * (x[1:] - x[:-1] ** 2) ** 2
t1 = (1 - x[:-1]) ** 2
return np.sum(t0 + t1)
|
Add good code for lazy analysisimport numpy as np
#runas import numpy as np; r = np.arange(1000000); rosen(r)
#pythran export rosen(float[])
def rosen(x):
t0 = 100 * (x[1:] - x[:-1] ** 2) ** 2
t1 = (1 - x[:-1]) ** 2
return np.sum(t0 + t1)
|
<commit_before><commit_msg>Add good code for lazy analysis<commit_after>import numpy as np
#runas import numpy as np; r = np.arange(1000000); rosen(r)
#pythran export rosen(float[])
def rosen(x):
t0 = 100 * (x[1:] - x[:-1] ** 2) ** 2
t1 = (1 - x[:-1]) ** 2
return np.sum(t0 + t1)
|
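Since this is the standard Rosenbrock sum, SciPy's reference implementation offers an easy cross-check, assuming SciPy is installed and rosen from the listing above is in scope:

import numpy as np
from scipy.optimize import rosen as scipy_rosen

r = np.arange(1000, dtype=float)
print(np.allclose(rosen(r), scipy_rosen(r)))  # True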
|
03d1deca42c9ee2a3d2b51d37e0e6b41485faa10
|
tools/convert-url-history.py
|
tools/convert-url-history.py
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(r' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
Add initial URL history importer
|
Add initial URL history importer
|
Python
|
apache-2.0
|
erasilva/namebench,MANICX100/namebench,kevinxw/namebench,hashem78/namebench,deeb230/namebench,omerhasan/namebench,somehume/namebench,crocleco/namebench,watchamakulit02/namebench,ItsAGeekThing/namebench,bluemask2001/namebench,chosen1/namebench,Forgen/namebench,kristi29091988/namebench,LavyshAlexander/namebench,antsant/namebench,Trinitaria/namebench,ZuluPro/namebench,lukasfenix/namebench,yiyuandao/namebench,Jasoning/namebench,el-lumbergato/namebench,doantranhoang/namebench,AViisiion/namebench,pombreda/namebench,ajs124/namebench,sbalun/namebench,edesiocs/namebench,movermeyer/namebench,AgentN/namebench,jjoaonunes/namebench,Spindletop16/namebench,vishnunuk/namebench,when30/namebench,chamakov/namebench,phy0/namebench,xeoron/namebench,thiagomagero/namebench,stefrobb/namebench,cartersgenes/namebench,eladelad/namebench,HerlonNascimento/namebench,pacav69/namebench,razrichter/namebench,perrytm/namebench,manaure/namebench,ericmckean/namebench,feardax/namebench,doadin/namebench,corruptnova/namebench,21winner/namebench,FatBumbleee/namebench,benklaasen/namebench,TheNite/namebench,DanielAttia/namebench,etxc/namebench,KibaAmor/namebench,ajitsonlion/namebench,Ritvik1512/namebench,jackjshin/namebench,cvanwie/namebench,souzainf3/namebench,shannonjlove/namebench,rubasben/namebench,felipsmartins/namebench,jevgen/namebench,asolfre/namebench,gdbdzgd/namebench,jtrag/namebench,antar2801/namebench,fevangelou/namebench,palimadra/namebench,techsd/namebench,woozzoom/namebench,isoriss123/namebench,hypnotika/namebench,Bandito43/namebench,pyshcoder/namebench,accomac/namebench,jimb0616/namebench,santoshsahoo/namebench,gavinfaux/namebench,aman-tugnawat/namebench,hitrust/namebench,rosemead/namebench,tectronics/namebench,llaera/namebench,trulow/namebench,melissaihrig/namebench,MicroWorldwide/namebench,Jeff-Lewis/namebench,ronzohan/namebench,xxhank/namebench,Hazer/namebench,cloudcache/namebench,sund/namebench,unreal666/namebench,hwuiwon/namebench,danieljl/namebench,deepak5/namebench,thatchristoph/namebench,cah0211/namebench,tushevorg/namebench,CookiesandCake/namebench,LegitSavage/namebench,wa111/namebench,illAdvised/namebench,dimazalfrianz/namebench,Arrowofdarkness/namebench,webhost/namebench,imranrony/namebench,AdamHull/namebench,bgammill/namebench,KingPsychopath/namebench,Xeleste/namebench,leeoo/namebench,richardgroves/namebench,petabytekr/namebench,xubayer786/namebench,cyranodb/namebench,fbidu/namebench,tcffisher/namebench,dsjr2006/namebench,seshin/namebench,alexlovelltroy/namebench,Max-Vader/namebench,evelynmitchell/namebench,nadeemat/namebench,snailbob/namebench,nishad/namebench,siripuramrk/namebench,TorpedoXL/namebench,beermix/namebench,nt1st/namebench,edmilson19/namebench,jaded44/namebench,alebcay/namebench,teknix/namebench,jakeylube95/namebench,GLMeece/namebench,edumatos/namebench,RomanHargrave/namebench,mspringett/namebench,iamang/namebench,arjun372/namebench,qbektrix/namebench,skuarch/namebench,MarnuLombard/namebench,Kudeshido/namebench,michaeldavidcarr/namebench,ulaskaraoren/namebench,RichardWilliamPearse/namebench,thanhuwng/namebench,takuya/namebench,renanrodm/namebench,kiseok7/namebench,renatogames2/namebench,repomain/namebench,jaechankim/namebench,PyroShark/namebench,donavoncade/namebench,ran0101/namebench,rbenjamin/namebench,jlobaton/namebench,wluizguedes/namebench,CrazeeIvan/namebench,BeZazz/lamebench,uwevil/namebench,sushifant/namebench,mystique1029/namebench
|
Add initial URL history importer
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(r' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
<commit_before><commit_msg>Add initial URL history importer<commit_after>
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(r' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
Add initial URL history importer#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(r' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
<commit_before><commit_msg>Add initial URL history importer<commit_after>#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate a data file based on an input file containing URL's.
This outputs a "weighted" (top hits first) data file for namebench
to use. Ignores subsequent hits for the same site.
"""
import operator
import sys
import re
if len(sys.argv) < 2:
  print "You must provide a filename."
  sys.exit(1)
filename = sys.argv[1]
parse_re = re.compile(r' \w+://([\-\w\.]+)')
hits = {}
last_host = None
for line in open(filename).readlines():
match = parse_re.search(line)
if match:
host = match.groups()[0] + '.'
if host != last_host:
hits[host] = hits.get(host, 0) + 1
last_host = host
top_hits = sorted(hits.items(), key=operator.itemgetter(1),reverse=True)
for (hit, count) in top_hits:
print 'A %s\t# %s hits' % (hit, count)
|
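A small worked example of the counting logic, using invented history lines (the real input has a timestamp-like field before the URL, which supplies the leading space the regex requires); note how the consecutive repeat of the same host is counted only once:

import re

parse_re = re.compile(r' \w+://([\-\w\.]+)')
history = [
    '1256853600 http://www.example.com/index.html',
    '1256853601 http://www.example.com/logo.png',
    '1256853700 https://news.example.org/today',
]
hits, last_host = {}, None
for line in history:
    match = parse_re.search(line)
    if match:
        host = match.groups()[0] + '.'
        if host != last_host:
            hits[host] = hits.get(host, 0) + 1
        last_host = host
print(hits)  # {'www.example.com.': 1, 'news.example.org.': 1}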
|
43c1b3976625a99fe67cae11ff162324aabc915f
|
numba/annotate/ir_capture.py
|
numba/annotate/ir_capture.py
|
# -*- coding: UTF-8 -*-
"""
Capture IR emissions.
"""
from __future__ import print_function, division, absolute_import
import collections
from .annotate import SourceIntermediate, Source
# ______________________________________________________________________
class LLVMIRBuilder(object):
def __init__(self, builder):
self.builder = builder
self.captured = collections.defaultdict(list)
def update_pos(self, pos):
self.pos = pos
def __getattr__(self, attr):
m = getattr(self.builder, attr)
if not callable(m):
return m
def emit(*args, **kwargs):
result = m(*args, **kwargs)
self.captured[self.pos].append(result)
return result
return emit
# ______________________________________________________________________
def get_annotations(ir_builder):
"Get annotations from an IR builder"
linenomap = collections.defaultdict(list)
linemap = {}
ir_lineno = 1
for pos, instrs in sorted(ir_builder.captured.iteritems()):
for instr in instrs:
linenomap[pos].append(ir_lineno)
linemap[ir_lineno] = str(instr)
ir_lineno += 1
source = Source(linemap, annotations=[])
return SourceIntermediate(linenomap, source)
# ______________________________________________________________________
|
Add some code to capture IR for annotation
|
Add some code to capture IR for annotation
|
Python
|
bsd-2-clause
|
ssarangi/numba,seibert/numba,sklam/numba,stonebig/numba,seibert/numba,jriehl/numba,cpcloud/numba,stefanseefeld/numba,pitrou/numba,GaZ3ll3/numba,seibert/numba,sklam/numba,stefanseefeld/numba,numba/numba,IntelLabs/numba,jriehl/numba,numba/numba,GaZ3ll3/numba,ssarangi/numba,gmarkall/numba,pitrou/numba,jriehl/numba,sklam/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,seibert/numba,stefanseefeld/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,pombredanne/numba,stuartarchibald/numba,stonebig/numba,stuartarchibald/numba,pombredanne/numba,gmarkall/numba,gdementen/numba,numba/numba,sklam/numba,gdementen/numba,cpcloud/numba,gmarkall/numba,IntelLabs/numba,gdementen/numba,gmarkall/numba,stonebig/numba,GaZ3ll3/numba,jriehl/numba,stonebig/numba,ssarangi/numba,stefanseefeld/numba,GaZ3ll3/numba,pitrou/numba,ssarangi/numba,jriehl/numba,seibert/numba,gdementen/numba,pitrou/numba,pitrou/numba,stefanseefeld/numba,stuartarchibald/numba,pombredanne/numba,gdementen/numba,stuartarchibald/numba,stonebig/numba,GaZ3ll3/numba,cpcloud/numba,numba/numba,cpcloud/numba,pombredanne/numba,stuartarchibald/numba,sklam/numba,ssarangi/numba,pombredanne/numba
|
Add some code to capture IR for annotation
|
# -*- coding: UTF-8 -*-
"""
Capture IR emissions.
"""
from __future__ import print_function, division, absolute_import
import collections
from .annotate import SourceIntermediate, Source
# ______________________________________________________________________
class LLVMIRBuilder(object):
def __init__(self, builder):
self.builder = builder
self.captured = collections.defaultdict(list)
def update_pos(self, pos):
self.pos = pos
def __getattr__(self, attr):
m = getattr(self.builder, attr)
if not callable(m):
return m
def emit(*args, **kwargs):
result = m(*args, **kwargs)
self.captured[self.pos].append(result)
return result
return emit
# ______________________________________________________________________
def get_annotations(ir_builder):
"Get annotations from an IR builder"
linenomap = collections.defaultdict(list)
linemap = {}
ir_lineno = 1
for pos, instrs in sorted(ir_builder.captured.iteritems()):
for instr in instrs:
linenomap[pos].append(ir_lineno)
linemap[ir_lineno] = str(instr)
ir_lineno += 1
source = Source(linemap, annotations=[])
return SourceIntermediate(linenomap, source)
# ______________________________________________________________________
|
<commit_before><commit_msg>Add some code to capture IR for annotation<commit_after>
|
# -*- coding: UTF-8 -*-
"""
Capture IR emissions.
"""
from __future__ import print_function, division, absolute_import
import collections
from .annotate import SourceIntermediate, Source
# ______________________________________________________________________
class LLVMIRBuilder(object):
def __init__(self, builder):
self.builder = builder
self.captured = collections.defaultdict(list)
def update_pos(self, pos):
self.pos = pos
def __getattr__(self, attr):
m = getattr(self.builder, attr)
if not callable(m):
return m
def emit(*args, **kwargs):
result = m(*args, **kwargs)
self.captured[self.pos].append(result)
return result
return emit
# ______________________________________________________________________
def get_annotations(ir_builder):
"Get annotations from an IR builder"
linenomap = collections.defaultdict(list)
linemap = {}
ir_lineno = 1
for pos, instrs in sorted(ir_builder.captured.iteritems()):
for instr in instrs:
linenomap[pos].append(ir_lineno)
linemap[ir_lineno] = str(instr)
ir_lineno += 1
source = Source(linemap, annotations=[])
return SourceIntermediate(linenomap, source)
# ______________________________________________________________________
|
Add some code to capture IR for annotation# -*- coding: UTF-8 -*-
"""
Capture IR emissions.
"""
from __future__ import print_function, division, absolute_import
import collections
from .annotate import SourceIntermediate, Source
# ______________________________________________________________________
class LLVMIRBuilder(object):
def __init__(self, builder):
self.builder = builder
self.captured = collections.defaultdict(list)
def update_pos(self, pos):
self.pos = pos
def __getattr__(self, attr):
m = getattr(self.builder, attr)
if not callable(m):
return m
def emit(*args, **kwargs):
result = m(*args, **kwargs)
self.captured[self.pos].append(result)
return result
return emit
# ______________________________________________________________________
def get_annotations(ir_builder):
"Get annotations from an IR builder"
linenomap = collections.defaultdict(list)
linemap = {}
ir_lineno = 1
for pos, instrs in sorted(ir_builder.captured.iteritems()):
for instr in instrs:
linenomap[pos].append(ir_lineno)
linemap[ir_lineno] = str(instr)
ir_lineno += 1
source = Source(linemap, annotations=[])
return SourceIntermediate(linenomap, source)
# ______________________________________________________________________
|
<commit_before><commit_msg>Add some code to capture IR for annotation<commit_after># -*- coding: UTF-8 -*-
"""
Capture IR emissions.
"""
from __future__ import print_function, division, absolute_import
import collections
from .annotate import SourceIntermediate, Source
# ______________________________________________________________________
class LLVMIRBuilder(object):
def __init__(self, builder):
self.builder = builder
self.captured = collections.defaultdict(list)
def update_pos(self, pos):
self.pos = pos
def __getattr__(self, attr):
m = getattr(self.builder, attr)
if not callable(m):
return m
def emit(*args, **kwargs):
result = m(*args, **kwargs)
self.captured[self.pos].append(result)
return result
return emit
# ______________________________________________________________________
def get_annotations(ir_builder):
"Get annotations from an IR builder"
linenomap = collections.defaultdict(list)
linemap = {}
ir_lineno = 1
for pos, instrs in sorted(ir_builder.captured.iteritems()):
for instr in instrs:
linenomap[pos].append(ir_lineno)
linemap[ir_lineno] = str(instr)
ir_lineno += 1
source = Source(linemap, annotations=[])
return SourceIntermediate(linenomap, source)
# ______________________________________________________________________
|
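A minimal sketch of how the capture wrapper behaves, using a stub in place of a real LLVM builder (the shape of pos is an assumption; in practice it would be a source position):

class StubBuilder(object):
    def add(self, lhs, rhs):
        return '%%tmp = add %s, %s' % (lhs, rhs)

capture = LLVMIRBuilder(StubBuilder())
capture.update_pos((1, 0))        # hypothetical (line, column) position
capture.add('i32 1', 'i32 2')     # forwarded to the stub and recorded
print(capture.captured[(1, 0)])   # ['%tmp = add i32 1, i32 2']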
|
c3836bb9528c8dd45486ef0078f15e358a6e3977
|
ask_api_examples/list_all_c_and_fortran_models.py
|
ask_api_examples/list_all_c_and_fortran_models.py
|
"""Queries the CSDMS model repository for all models written in C,
C++, Fortran 77, or Fortran 90."""
from ask_api_examples import make_query
query = '[[Programming language::C||C++||Fortran77||Fortran90]]|limit=10000'
def main():
r = make_query(query, __file__)
return r
if __name__ == '__main__':
print main()
|
Add example to list all models written in Fortran or C/C++
|
Add example to list all models written in Fortran or C/C++
|
Python
|
mit
|
mdpiper/csdms-wiki-api-examples
|
Add example to list all models written in Fortran or C/C++
|
"""Queries the CSDMS model repository for all models written in C,
C++, Fortran 77, or Fortran 90."""
from ask_api_examples import make_query
query = '[[Programming language::C||C++||Fortran77||Fortran90]]|limit=10000'
def main():
r = make_query(query, __file__)
return r
if __name__ == '__main__':
print main()
|
<commit_before><commit_msg>Add example to list all models written in Fortran or C/C++<commit_after>
|
"""Queries the CSDMS model repository for all models written in C,
C++, Fortran 77, or Fortran 90."""
from ask_api_examples import make_query
query = '[[Programming language::C||C++||Fortran77||Fortran90]]|limit=10000'
def main():
r = make_query(query, __file__)
return r
if __name__ == '__main__':
print main()
|
Add example to list all models written in Fortran or C/C++"""Queries the CSDMS model repository for all models written in C,
C++, Fortran 77, or Fortran 90."""
from ask_api_examples import make_query
query = '[[Programming language::C||C++||Fortran77||Fortran90]]|limit=10000'
def main():
r = make_query(query, __file__)
return r
if __name__ == '__main__':
print main()
|
<commit_before><commit_msg>Add example to list all models written in Fortran or C/C++<commit_after>"""Queries the CSDMS model repository for all models written in C,
C++, Fortran 77, or Fortran 90."""
from ask_api_examples import make_query
query = '[[Programming language::C||C++||Fortran77||Fortran90]]|limit=10000'
def main():
r = make_query(query, __file__)
return r
if __name__ == '__main__':
print main()
|
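make_query is defined elsewhere in the examples package; presumably it issues a Semantic MediaWiki Ask API request along these lines (the endpoint URL here is hypothetical, and the query -> results response shape is an assumption based on the standard Ask API):

import requests

API_URL = 'https://csdms.example.edu/api.php'  # hypothetical endpoint
params = {
    'action': 'ask',
    'query': '[[Programming language::C||C++||Fortran77||Fortran90]]|limit=10000',
    'format': 'json',
}
resp = requests.get(API_URL, params=params).json()
print(len(resp['query']['results']))  # number of matching model pages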
|
9c767eba6236a6130f5985557aeb47d977079737
|
test/469-transit-features.py
|
test/469-transit-features.py
|
# way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'transit',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
|
Add tests for transit features
|
Add tests for transit features
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
Add tests for transit features
|
# way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'transit',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
|
<commit_before><commit_msg>Add tests for transit features<commit_after>
|
# way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'transit',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
|
Add tests for transit features# way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'transit',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
|
<commit_before><commit_msg>Add tests for transit features<commit_after># way 91806504
assert_has_feature(
16, 10470, 25316, 'transit',
{ 'kind': 'bus_stop' })
# node 1241518350
assert_has_feature(
16, 10480, 25332, 'transit',
{ 'kind': 'bus_stop' })
# way 196670577
assert_has_feature(
16, 10486, 25326, 'transit',
{ 'kind': 'platform' })
|
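The z/x/y triples in these assertions follow the standard OSM "slippy map" tile scheme; a quick way to see which tile a coordinate lands in:

import math

def deg2tile(lat, lon, zoom):
    # Web Mercator tile indexing: x from longitude, y from projected latitude
    n = 2 ** zoom
    x = int((lon + 180.0) / 360.0 * n)
    lat_r = math.radians(lat)
    y = int((1.0 - math.log(math.tan(lat_r) + 1 / math.cos(lat_r)) / math.pi) / 2.0 * n)
    return x, y

print(deg2tile(37.8385, -122.484, 16))  # approximately (10470, 25316), the first tile above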
|
b504b7316ab53a8cf87d2a7c0d23d1fd43208727
|
cartridge/shop/management/commands/expirecarts.py
|
cartridge/shop/management/commands/expirecarts.py
|
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from mezzanine.conf import settings
from cartridge.shop.models import *
from datetime import timedelta
from django.utils.timezone import now
class Command(BaseCommand):
help = 'Expire carts'
def handle(self, *args, **options):
old_carts = Cart.objects.filter(last_updated__lt=now()-timedelta(minutes=settings.SHOP_CART_EXPIRY_MINUTES))
for old_cart in old_carts:
for item in old_cart.items.all():
item.delete()
old_cart.delete()
|
Add management command expire carts to expire carts [upstreamcand]. Well of course this should probably happen automatically, but a workaround for now to cronjob this every few minutes
|
Add management command expire carts to expire carts [upstreamcand]. Well of course this should probably happen automatically, but a workaround for now to cronjob this every few minutes
|
Python
|
bsd-2-clause
|
jaywink/cartridge-reservable,jaywink/cartridge-reservable,jaywink/cartridge-reservable
|
Add management command expire carts to expire carts [upstreamcand]. Well of course this should probably happen automatically, but a workaround for now to cronjob this every few minutes
|
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from mezzanine.conf import settings
from cartridge.shop.models import *
from datetime import timedelta
from django.utils.timezone import now
class Command(BaseCommand):
help = 'Expire carts'
def handle(self, *args, **options):
old_carts = Cart.objects.filter(last_updated__lt=now()-timedelta(minutes=settings.SHOP_CART_EXPIRY_MINUTES))
for old_cart in old_carts:
for item in old_cart.items.all():
item.delete()
old_cart.delete()
|
<commit_before><commit_msg>Add management command expire carts to expire carts [upstreamcand]. Well of course this should probably happen automatically, but a workaround for now to cronjob this every few minutes<commit_after>
|
from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from mezzanine.conf import settings
from cartridge.shop.models import *
from datetime import timedelta
from django.utils.timezone import now
class Command(BaseCommand):
help = 'Expire carts'
def handle(self, *args, **options):
old_carts = Cart.objects.filter(last_updated__lt=now()-timedelta(minutes=settings.SHOP_CART_EXPIRY_MINUTES))
for old_cart in old_carts:
for item in old_cart.items.all():
item.delete()
old_cart.delete()
|
Add management command expire carts to expire carts [upstreamcand]. Well of course this should probably happen automatically, but a workaround for now to cronjob this every few minutesfrom django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from mezzanine.conf import settings
from cartridge.shop.models import *
from datetime import timedelta
from django.utils.timezone import now
class Command(BaseCommand):
help = 'Expire carts'
def handle(self, *args, **options):
old_carts = Cart.objects.filter(last_updated__lt=now()-timedelta(minutes=settings.SHOP_CART_EXPIRY_MINUTES))
for old_cart in old_carts:
for item in old_cart.items.all():
item.delete()
old_cart.delete()
|
<commit_before><commit_msg>Add management command expire carts to expire carts [upstreamcand]. Well of course this should probably happen automatically, but a workaround for now to cronjob this every few minutes<commit_after>from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from mezzanine.conf import settings
from cartridge.shop.models import *
from datetime import timedelta
from django.utils.timezone import now
class Command(BaseCommand):
help = 'Expire carts'
def handle(self, *args, **options):
old_carts = Cart.objects.filter(last_updated__lt=now()-timedelta(minutes=settings.SHOP_CART_EXPIRY_MINUTES))
for old_cart in old_carts:
for item in old_cart.items.all():
item.delete()
old_cart.delete()
|
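The command is meant to be scheduled, e.g. via a crontab entry such as */10 * * * * /srv/site/manage.py expirecarts (paths illustrative); it can also be invoked programmatically:

from django.core.management import call_command

# Equivalent to `python manage.py expirecarts`
call_command('expirecarts')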
|
b34e80b1bb387ba833622009a99d8b7301bb649f
|
tests/compiler/test_method_call_compilation.py
|
tests/compiler/test_method_call_compilation.py
|
from tests.compiler import compile_local, SELF_ID, LST_ID, VAL1_ID
from thinglang.compiler.opcodes import OpcodePushLocal, OpcodePushIndexImmediate, OpcodeCallInternal, OpcodePushMember, \
OpcodePushIndex
def test_access_in_method_args():
assert compile_local('self.action(lst[123])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushIndexImmediate(123),
OpcodeCallInternal(0, 3)
]
assert compile_local('self.action(lst[self.val1])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushMember(SELF_ID, VAL1_ID),
OpcodePushIndex(),
OpcodeCallInternal(0, 3)
]
|
Add test for method call compilation
|
Add test for method call compilation
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
Add test for method call compilation
|
from tests.compiler import compile_local, SELF_ID, LST_ID, VAL1_ID
from thinglang.compiler.opcodes import OpcodePushLocal, OpcodePushIndexImmediate, OpcodeCallInternal, OpcodePushMember, \
OpcodePushIndex
def test_access_in_method_args():
assert compile_local('self.action(lst[123])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushIndexImmediate(123),
OpcodeCallInternal(0, 3)
]
assert compile_local('self.action(lst[self.val1])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushMember(SELF_ID, VAL1_ID),
OpcodePushIndex(),
OpcodeCallInternal(0, 3)
]
|
<commit_before><commit_msg>Add test for method call compilation<commit_after>
|
from tests.compiler import compile_local, SELF_ID, LST_ID, VAL1_ID
from thinglang.compiler.opcodes import OpcodePushLocal, OpcodePushIndexImmediate, OpcodeCallInternal, OpcodePushMember, \
OpcodePushIndex
def test_access_in_method_args():
assert compile_local('self.action(lst[123])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushIndexImmediate(123),
OpcodeCallInternal(0, 3)
]
assert compile_local('self.action(lst[self.val1])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushMember(SELF_ID, VAL1_ID),
OpcodePushIndex(),
OpcodeCallInternal(0, 3)
]
|
Add test for method call compilationfrom tests.compiler import compile_local, SELF_ID, LST_ID, VAL1_ID
from thinglang.compiler.opcodes import OpcodePushLocal, OpcodePushIndexImmediate, OpcodeCallInternal, OpcodePushMember, \
OpcodePushIndex
def test_access_in_method_args():
assert compile_local('self.action(lst[123])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushIndexImmediate(123),
OpcodeCallInternal(0, 3)
]
assert compile_local('self.action(lst[self.val1])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushMember(SELF_ID, VAL1_ID),
OpcodePushIndex(),
OpcodeCallInternal(0, 3)
]
|
<commit_before><commit_msg>Add test for method call compilation<commit_after>from tests.compiler import compile_local, SELF_ID, LST_ID, VAL1_ID
from thinglang.compiler.opcodes import OpcodePushLocal, OpcodePushIndexImmediate, OpcodeCallInternal, OpcodePushMember, \
OpcodePushIndex
def test_access_in_method_args():
assert compile_local('self.action(lst[123])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushIndexImmediate(123),
OpcodeCallInternal(0, 3)
]
assert compile_local('self.action(lst[self.val1])') == [
OpcodePushLocal(SELF_ID),
OpcodePushLocal(LST_ID),
OpcodePushMember(SELF_ID, VAL1_ID),
OpcodePushIndex(),
OpcodeCallInternal(0, 3)
]
|
|
b38f7748c2bf51bf99d187d2b7952af0def893d1
|
pylinks/links/migrations/0002_auto_20170703_1843.py
|
pylinks/links/migrations/0002_auto_20170703_1843.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import pylinks.links.utils
class Migration(migrations.Migration):
dependencies = [
('links', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='link',
name='file',
field=pylinks.links.utils.LinkFileField(default=None, max_length=500, null=True, help_text=b'A file to be uploaded and linked to instead of the URL.', blank=True),
preserve_default=True,
),
]
|
Add migration to switch to Uploadcare
|
Add migration to switch to Uploadcare
|
Python
|
mit
|
michaelmior/pylinks,michaelmior/pylinks,michaelmior/pylinks
|
Add migration to switch to Uploadcare
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import pylinks.links.utils
class Migration(migrations.Migration):
dependencies = [
('links', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='link',
name='file',
field=pylinks.links.utils.LinkFileField(default=None, max_length=500, null=True, help_text=b'A file to be uploaded and linked to instead of the URL.', blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration to switch to Uploadcare<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import pylinks.links.utils
class Migration(migrations.Migration):
dependencies = [
('links', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='link',
name='file',
field=pylinks.links.utils.LinkFileField(default=None, max_length=500, null=True, help_text=b'A file to be uploaded and linked to instead of the URL.', blank=True),
preserve_default=True,
),
]
|
Add migration to switch to Uploadcare# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import pylinks.links.utils
class Migration(migrations.Migration):
dependencies = [
('links', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='link',
name='file',
field=pylinks.links.utils.LinkFileField(default=None, max_length=500, null=True, help_text=b'A file to be uploaded and linked to instead of the URL.', blank=True),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migration to switch to Uploadcare<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import pylinks.links.utils
class Migration(migrations.Migration):
dependencies = [
('links', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='link',
name='file',
field=pylinks.links.utils.LinkFileField(default=None, max_length=500, null=True, help_text=b'A file to be uploaded and linked to instead of the URL.', blank=True),
preserve_default=True,
),
]
|
|
b6c2c53a73397637c8852187315b74cd8e52c908
|
scripts/migration/migrate_unregistered_user_emails.py
|
scripts/migration/migrate_unregistered_user_emails.py
|
"""Removes User.username from User.emails for unregistered users.
"""
import logging
import sys
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
from tests import factories
from tests.base import OsfTestCase
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with unconfirmed email"
"s")
for user in get_users_with_unconfirmed_emails():
remove_unconfirmed_emails(user)
logger.info(repr(user))
if not dry_run:
user.save()
def get_users_with_unconfirmed_emails():
return models.User.find(
Q('date_confirmed', 'eq', None)
& Q('emails', 'ne', [])
)
def remove_unconfirmed_emails(user):
user.emails = []
class TestMigrateUnconfirmedEmails(OsfTestCase):
def setUp(self):
super(TestMigrateUnconfirmedEmails, self).setUp()
self.registered_user = factories.UserFactory()
self.unconfirmed = factories.UnconfirmedUserFactory()
self.unregistered = factories.UnregUserFactory()
self.unregistered.emails = [self.unregistered.username]
self.unregistered.save()
def tearDown(self):
super(TestMigrateUnconfirmedEmails, self).tearDown()
models.User.remove()
def test_get_users(self):
self.unregistered.reload()
assert_equal(
list(get_users_with_unconfirmed_emails()),
[self.unregistered]
)
def test_fix_user(self):
remove_unconfirmed_emails(self.unregistered)
assert_equal(
self.unregistered.emails,
[]
)
if __name__ == '__main__':
main()
|
Add migration script for users with unconfirmed emails
|
Add migration script for users with unconfirmed emails
|
Python
|
apache-2.0
|
Johnetordoff/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,reinaH/osf.io,KAsante95/osf.io,caneruguz/osf.io,KAsante95/osf.io,emetsger/osf.io,TomBaxter/osf.io,acshi/osf.io,sbt9uc/osf.io,doublebits/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,njantrania/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,acshi/osf.io,GageGaskins/osf.io,revanthkolli/osf.io,jmcarp/osf.io,icereval/osf.io,petermalcolm/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,fabianvf/osf.io,brandonPurvis/osf.io,emetsger/osf.io,ticklemepierce/osf.io,billyhunt/osf.io,samanehsan/osf.io,samanehsan/osf.io,wearpants/osf.io,billyhunt/osf.io,cosenal/osf.io,petermalcolm/osf.io,cslzchen/osf.io,mluke93/osf.io,danielneis/osf.io,leb2dg/osf.io,samchrisinger/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,erinspace/osf.io,lamdnhan/osf.io,revanthkolli/osf.io,mluke93/osf.io,doublebits/osf.io,crcresearch/osf.io,jinluyuan/osf.io,chennan47/osf.io,cosenal/osf.io,Ghalko/osf.io,ZobairAlijan/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,felliott/osf.io,acshi/osf.io,hmoco/osf.io,mluo613/osf.io,ckc6cz/osf.io,icereval/osf.io,ckc6cz/osf.io,caseyrollins/osf.io,wearpants/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,zkraime/osf.io,sloria/osf.io,cldershem/osf.io,jinluyuan/osf.io,crcresearch/osf.io,bdyetton/prettychart,SSJohns/osf.io,asanfilippo7/osf.io,petermalcolm/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,mfraezz/osf.io,alexschiller/osf.io,samchrisinger/osf.io,petermalcolm/osf.io,dplorimer/osf,Nesiehr/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,felliott/osf.io,samanehsan/osf.io,zachjanicki/osf.io,arpitar/osf.io,kch8qx/osf.io,KAsante95/osf.io,barbour-em/osf.io,asanfilippo7/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,dplorimer/osf,sbt9uc/osf.io,brandonPurvis/osf.io,lyndsysimon/osf.io,baylee-d/osf.io,arpitar/osf.io,caseyrygt/osf.io,chrisseto/osf.io,billyhunt/osf.io,emetsger/osf.io,brianjgeiger/osf.io,danielneis/osf.io,samchrisinger/osf.io,TomBaxter/osf.io,leb2dg/osf.io,jinluyuan/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,sloria/osf.io,aaxelb/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,njantrania/osf.io,reinaH/osf.io,billyhunt/osf.io,crcresearch/osf.io,GageGaskins/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,jolene-esposito/osf.io,acshi/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,GaryKriebel/osf.io,jnayak1/osf.io,icereval/osf.io,RomanZWang/osf.io,binoculars/osf.io,hmoco/osf.io,caseyrygt/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,revanthkolli/osf.io,acshi/osf.io,GageGaskins/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,felliott/osf.io,baylee-d/osf.io,jnayak1/osf.io,KAsante95/osf.io,jeffreyliu3230/osf.io,HalcyonChimera/osf.io,barbour-em/osf.io,lamdnhan/osf.io,zkraime/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,mattclark/osf.io,GaryKriebel/osf.io,erinspace/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,cwisecarver/osf.io,ckc6cz/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,erinspace/osf.io,zachjanicki/osf.io,danielneis/osf.io,bdyetton/prettychart,mluo613/osf.io,caseyrollins/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,abought/osf.io,kch8qx/osf.io,alexschiller/osf.io,reinaH/osf.io,mluke93/osf.io,njantrania/osf.io,chrisseto/osf.io,revanthkolli/osf.io,ZobairAlijan/osf.io,cosenal/osf.io,jmcarp/osf.io,ticklemepierce/osf.io,samanehsan/osf.io,leb2dg/osf.io,arpitar/osf.io,doublebits/osf.io,GageGaskins/osf.io,MerlinZhang/osf.io,cslzchen/osf.io,sbt9uc/osf.io,adlius/osf.io,HarryRybacki/osf.io,aaxelb/osf.io,reinaH/osf.io,kwierman/osf.io,doublebits/osf.io,cwisecarver/osf.
io,ckc6cz/osf.io,bdyetton/prettychart,Johnetordoff/osf.io,pattisdr/osf.io,jolene-esposito/osf.io,ticklemepierce/osf.io,zkraime/osf.io,njantrania/osf.io,zamattiac/osf.io,chennan47/osf.io,HarryRybacki/osf.io,adlius/osf.io,doublebits/osf.io,chrisseto/osf.io,alexschiller/osf.io,KAsante95/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,felliott/osf.io,sbt9uc/osf.io,bdyetton/prettychart,RomanZWang/osf.io,fabianvf/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,rdhyee/osf.io,abought/osf.io,RomanZWang/osf.io,abought/osf.io,mattclark/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,GaryKriebel/osf.io,cosenal/osf.io,pattisdr/osf.io,dplorimer/osf,mluke93/osf.io,amyshi188/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,cwisecarver/osf.io,mluo613/osf.io,pattisdr/osf.io,lamdnhan/osf.io,aaxelb/osf.io,caneruguz/osf.io,barbour-em/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,cldershem/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,abought/osf.io,adlius/osf.io,zamattiac/osf.io,amyshi188/osf.io,cldershem/osf.io,Nesiehr/osf.io,emetsger/osf.io,lyndsysimon/osf.io,jmcarp/osf.io,lyndsysimon/osf.io,cslzchen/osf.io,danielneis/osf.io,DanielSBrown/osf.io,kwierman/osf.io,mfraezz/osf.io,billyhunt/osf.io,adlius/osf.io,jolene-esposito/osf.io,samchrisinger/osf.io,SSJohns/osf.io,SSJohns/osf.io,wearpants/osf.io,caseyrygt/osf.io,brandonPurvis/osf.io,Ghalko/osf.io,amyshi188/osf.io,rdhyee/osf.io,chennan47/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,binoculars/osf.io,mattclark/osf.io,binoculars/osf.io,jnayak1/osf.io,jinluyuan/osf.io,cldershem/osf.io,caseyrygt/osf.io,Ghalko/osf.io,jolene-esposito/osf.io,jmcarp/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,arpitar/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,jeffreyliu3230/osf.io,Johnetordoff/osf.io,zkraime/osf.io,RomanZWang/osf.io,kwierman/osf.io,fabianvf/osf.io,barbour-em/osf.io,sloria/osf.io,GageGaskins/osf.io,haoyuchen1992/osf.io,HarryRybacki/osf.io,lamdnhan/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,MerlinZhang/osf.io,caneruguz/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,hmoco/osf.io,lyndsysimon/osf.io,jeffreyliu3230/osf.io,dplorimer/osf,baylee-d/osf.io,mluo613/osf.io,cslzchen/osf.io,fabianvf/osf.io,mluo613/osf.io
|
Add migration script for users with unconfirmed emails
|
"""Removes User.username from User.emails for unregistered users.
"""
import logging
import sys
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
from tests import factories
from tests.base import OsfTestCase
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with unconfirmed email"
"s")
for user in get_users_with_unconfirmed_emails():
remove_unconfirmed_emails(user)
logger.info(repr(user))
if not dry_run:
user.save()
def get_users_with_unconfirmed_emails():
return models.User.find(
Q('date_confirmed', 'eq', None)
& Q('emails', 'ne', [])
)
def remove_unconfirmed_emails(user):
user.emails = []
class TestMigrateUnconfirmedEmails(OsfTestCase):
def setUp(self):
super(TestMigrateUnconfirmedEmails, self).setUp()
self.registered_user = factories.UserFactory()
self.unconfirmed = factories.UnconfirmedUserFactory()
self.unregistered = factories.UnregUserFactory()
self.unregistered.emails = [self.unregistered.username]
self.unregistered.save()
def tearDown(self):
super(TestMigrateUnconfirmedEmails, self).tearDown()
models.User.remove()
def test_get_users(self):
self.unregistered.reload()
assert_equal(
list(get_users_with_unconfirmed_emails()),
[self.unregistered]
)
def test_fix_user(self):
remove_unconfirmed_emails(self.unregistered)
assert_equal(
self.unregistered.emails,
[]
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add migration script for users with unconfirmed emails<commit_after>
|
"""Removes User.username from User.emails for unregistered users.
"""
import logging
import sys
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
from tests import factories
from tests.base import OsfTestCase
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with unconfirmed email"
"s")
for user in get_users_with_unconfirmed_emails():
remove_unconfirmed_emails(user)
logger.info(repr(user))
if not dry_run:
user.save()
def get_users_with_unconfirmed_emails():
return models.User.find(
Q('date_confirmed', 'eq', None)
& Q('emails', 'ne', [])
)
def remove_unconfirmed_emails(user):
user.emails = []
class TestMigrateUnconfirmedEmails(OsfTestCase):
def setUp(self):
super(TestMigrateUnconfirmedEmails, self).setUp()
self.registered_user = factories.UserFactory()
self.unconfirmed = factories.UnconfirmedUserFactory()
self.unregistered = factories.UnregUserFactory()
self.unregistered.emails = [self.unregistered.username]
self.unregistered.save()
def tearDown(self):
super(TestMigrateUnconfirmedEmails, self).tearDown()
models.User.remove()
def test_get_users(self):
self.unregistered.reload()
assert_equal(
list(get_users_with_unconfirmed_emails()),
[self.unregistered]
)
def test_fix_user(self):
remove_unconfirmed_emails(self.unregistered)
assert_equal(
self.unregistered.emails,
[]
)
if __name__ == '__main__':
main()
|
Add migration script for users with unconfirmed emails"""Removes User.username from User.emails for unregistered users.
"""
import logging
import sys
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
from tests import factories
from tests.base import OsfTestCase
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with unconfirmed email"
"s")
for user in get_users_with_unconfirmed_emails():
remove_unconfirmed_emails(user)
logger.info(repr(user))
if not dry_run:
user.save()
def get_users_with_unconfirmed_emails():
return models.User.find(
Q('date_confirmed', 'eq', None)
& Q('emails', 'ne', [])
)
def remove_unconfirmed_emails(user):
user.emails = []
class TestMigrateUnconfirmedEmails(OsfTestCase):
def setUp(self):
super(TestMigrateUnconfirmedEmails, self).setUp()
self.registered_user = factories.UserFactory()
self.unconfirmed = factories.UnconfirmedUserFactory()
self.unregistered = factories.UnregUserFactory()
self.unregistered.emails = [self.unregistered.username]
self.unregistered.save()
def tearDown(self):
super(TestMigrateUnconfirmedEmails, self).tearDown()
models.User.remove()
def test_get_users(self):
self.unregistered.reload()
assert_equal(
list(get_users_with_unconfirmed_emails()),
[self.unregistered]
)
def test_fix_user(self):
remove_unconfirmed_emails(self.unregistered)
assert_equal(
self.unregistered.emails,
[]
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add migration script for users with unconfirmed emails<commit_after>"""Removes User.username from User.emails for unregistered users.
"""
import logging
import sys
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
from tests import factories
from tests.base import OsfTestCase
logger = logging.getLogger(__name__)
def main():
# Set up storage backends
init_app(routes=False)
dry_run = 'dry' in sys.argv
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with unconfirmed email"
"s")
for user in get_users_with_unconfirmed_emails():
remove_unconfirmed_emails(user)
logger.info(repr(user))
if not dry_run:
user.save()
def get_users_with_unconfirmed_emails():
return models.User.find(
Q('date_confirmed', 'eq', None)
& Q('emails', 'ne', [])
)
def remove_unconfirmed_emails(user):
user.emails = []
class TestMigrateUnconfirmedEmails(OsfTestCase):
def setUp(self):
super(TestMigrateUnconfirmedEmails, self).setUp()
self.registered_user = factories.UserFactory()
self.unconfirmed = factories.UnconfirmedUserFactory()
self.unregistered = factories.UnregUserFactory()
self.unregistered.emails = [self.unregistered.username]
self.unregistered.save()
def tearDown(self):
super(TestMigrateUnconfirmedEmails, self).tearDown()
models.User.remove()
def test_get_users(self):
self.unregistered.reload()
assert_equal(
list(get_users_with_unconfirmed_emails()),
[self.unregistered]
)
def test_fix_user(self):
remove_unconfirmed_emails(self.unregistered)
assert_equal(
self.unregistered.emails,
[]
)
if __name__ == '__main__':
main()
|
|
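The migration record above follows a dry-run-by-default shape: select the affected users, mutate them in memory, log each one, and persist only when the run is live. A minimal generic sketch of that shape, where `find_affected` and `fix` are hypothetical placeholders for the query and mutation steps (they are not OSF code):
import logging
import sys
logger = logging.getLogger(__name__)
def run_migration(find_affected, fix, dry_run=True):
    # Mutate in memory first; persist only on a live run.
    for record in find_affected():
        fix(record)
        logger.info(repr(record))
        if not dry_run:
            record.save()
if __name__ == '__main__':
    # Mirrors the record's convention: passing 'dry' on argv keeps the run read-only.
    run_migration(find_affected=lambda: [], fix=lambda r: None,
                  dry_run='dry' in sys.argv)
Keeping the query and the mutation in separate functions, as the record does, also makes each half unit-testable on its own.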
c56aef2c927290732dfb8ed65f173f7d8bb58439
|
git_externals/gitlab_utils.py
|
git_externals/gitlab_utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import click
import gitlab
def iter_projects(gl):
page = 0
while True:
projects = gl.projects.list(page=page, per_page=10)
if len(projects) == 0:
break
else:
page = page + 1
for project in projects:
yield project
@click.group()
@click.option('--gitlab-id', default=None)
@click.option('--config', default=None)
@click.pass_context
def cli(ctx, gitlab_id, config):
if config is not None:
config = [config]
gl = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config)
ctx.obj['api'] = gl
@cli.group()
@click.pass_context
def project(ctx):
pass
def get_project_by_path(gl, path):
with click.progressbar(iter_projects(gl), label='Searching project...') as projects:
for prj in projects:
if prj.path_with_namespace == path:
return prj
@project.command()
@click.argument('path')
@click.option('--sync', is_flag=True)
@click.pass_context
def delete(ctx, path, sync):
gl = ctx.obj['api']
prj = get_project_by_path(gl, path)
if prj is None:
click.echo('Unable to find a matching project for path %r' % path, err=True)
return
try:
if not gl.delete(prj):
            raise click.UsageError('Unable to delete project for path %r' % path)
except gitlab.GitlabGetError:
click.echo('The project %r seems to be already deleted' % path, err=True)
return
if sync:
with click.progressbar(range(12), label='Waiting for deletion...') as waiting:
for step in waiting:
try:
gl.projects.get(prj.id)
except gitlab.GitlabGetError:
deleted = True
break
time.sleep(10)
else:
deleted = False
if deleted:
click.echo('Project %r deleted' % path)
else:
            raise click.UsageError('Timeout waiting for %r deletion' % path)
else:
click.echo('Project %r submitted for deletion' % path)
def main():
cli(obj={})
if __name__ == '__main__':
main()
|
Add gittify-gitlab to delete projects by path
|
Add gittify-gitlab to delete projects by path
|
Python
|
mit
|
develersrl/git-externals,develersrl/git-externals,develersrl/git-externals
|
Add gittify-gitlab to delete projects by path
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import click
import gitlab
def iter_projects(gl):
page = 0
while True:
projects = gl.projects.list(page=page, per_page=10)
if len(projects) == 0:
break
else:
page = page + 1
for project in projects:
yield project
@click.group()
@click.option('--gitlab-id', default=None)
@click.option('--config', default=None)
@click.pass_context
def cli(ctx, gitlab_id, config):
if config is not None:
config = [config]
gl = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config)
ctx.obj['api'] = gl
@cli.group()
@click.pass_context
def project(ctx):
pass
def get_project_by_path(gl, path):
with click.progressbar(iter_projects(gl), label='Searching project...') as projects:
for prj in projects:
if prj.path_with_namespace == path:
return prj
@project.command()
@click.argument('path')
@click.option('--sync', is_flag=True)
@click.pass_context
def delete(ctx, path, sync):
gl = ctx.obj['api']
prj = get_project_by_path(gl, path)
if prj is None:
click.echo('Unable to find a matching project for path %r' % path, err=True)
return
try:
if not gl.delete(prj):
            raise click.UsageError('Unable to delete project for path %r' % path)
except gitlab.GitlabGetError:
click.echo('The project %r seems to be already deleted' % path, err=True)
return
if sync:
with click.progressbar(range(12), label='Waiting for deletion...') as waiting:
for step in waiting:
try:
gl.projects.get(prj.id)
except gitlab.GitlabGetError:
deleted = True
break
time.sleep(10)
else:
deleted = False
if deleted:
click.echo('Project %r deleted' % path)
else:
            raise click.UsageError('Timeout waiting for %r deletion' % path)
else:
click.echo('Project %r submitted for deletion' % path)
def main():
cli(obj={})
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add gittify-gitlab to detete projects by path<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import click
import gitlab
def iter_projects(gl):
page = 0
while True:
projects = gl.projects.list(page=page, per_page=10)
if len(projects) == 0:
break
else:
page = page + 1
for project in projects:
yield project
@click.group()
@click.option('--gitlab-id', default=None)
@click.option('--config', default=None)
@click.pass_context
def cli(ctx, gitlab_id, config):
if config is not None:
config = [config]
gl = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config)
ctx.obj['api'] = gl
@cli.group()
@click.pass_context
def project(ctx):
pass
def get_project_by_path(gl, path):
with click.progressbar(iter_projects(gl), label='Searching project...') as projects:
for prj in projects:
if prj.path_with_namespace == path:
return prj
@project.command()
@click.argument('path')
@click.option('--sync', is_flag=True)
@click.pass_context
def delete(ctx, path, sync):
gl = ctx.obj['api']
prj = get_project_by_path(gl, path)
if prj is None:
click.echo('Unable to find a matching project for path %r' % path, err=True)
return
try:
if not gl.delete(prj):
            raise click.UsageError('Unable to delete project for path %r' % path)
except gitlab.GitlabGetError:
click.echo('The project %r seems to be already deleted' % path, err=True)
return
if sync:
with click.progressbar(range(12), label='Waiting for deletion...') as waiting:
for step in waiting:
try:
gl.projects.get(prj.id)
except gitlab.GitlabGetError:
deleted = True
break
time.sleep(10)
else:
deleted = False
if deleted:
click.echo('Project %r deleted' % path)
else:
            raise click.UsageError('Timeout waiting for %r deletion' % path)
else:
click.echo('Project %r submitted for deletion' % path)
def main():
cli(obj={})
if __name__ == '__main__':
main()
|
Add gittify-gitlab to delete projects by path#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import click
import gitlab
def iter_projects(gl):
page = 0
while True:
projects = gl.projects.list(page=page, per_page=10)
if len(projects) == 0:
break
else:
page = page + 1
for project in projects:
yield project
@click.group()
@click.option('--gitlab-id', default=None)
@click.option('--config', default=None)
@click.pass_context
def cli(ctx, gitlab_id, config):
if config is not None:
config = [config]
gl = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config)
ctx.obj['api'] = gl
@cli.group()
@click.pass_context
def project(ctx):
pass
def get_project_by_path(gl, path):
with click.progressbar(iter_projects(gl), label='Searching project...') as projects:
for prj in projects:
if prj.path_with_namespace == path:
return prj
@project.command()
@click.argument('path')
@click.option('--sync', is_flag=True)
@click.pass_context
def delete(ctx, path, sync):
gl = ctx.obj['api']
prj = get_project_by_path(gl, path)
if prj is None:
click.echo('Unable to find a matching project for path %r' % path, err=True)
return
try:
if not gl.delete(prj):
            raise click.UsageError('Unable to delete project for path %r' % path)
except gitlab.GitlabGetError:
click.echo('The project %r seems to be already deleted' % path, err=True)
return
if sync:
with click.progressbar(range(12), label='Waiting for deletion...') as waiting:
for step in waiting:
try:
gl.projects.get(prj.id)
except gitlab.GitlabGetError:
deleted = True
break
time.sleep(10)
else:
deleted = False
if deleted:
click.echo('Project %r deleted' % path)
else:
            raise click.UsageError('Timeout waiting for %r deletion' % path)
else:
click.echo('Project %r submitted for deletion' % path)
def main():
cli(obj={})
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add gittify-gitlab to detete projects by path<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import click
import gitlab
def iter_projects(gl):
page = 0
while True:
projects = gl.projects.list(page=page, per_page=10)
if len(projects) == 0:
break
else:
page = page + 1
for project in projects:
yield project
@click.group()
@click.option('--gitlab-id', default=None)
@click.option('--config', default=None)
@click.pass_context
def cli(ctx, gitlab_id, config):
if config is not None:
config = [config]
gl = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config)
ctx.obj['api'] = gl
@cli.group()
@click.pass_context
def project(ctx):
pass
def get_project_by_path(gl, path):
with click.progressbar(iter_projects(gl), label='Searching project...') as projects:
for prj in projects:
if prj.path_with_namespace == path:
return prj
@project.command()
@click.argument('path')
@click.option('--sync', is_flag=True)
@click.pass_context
def delete(ctx, path, sync):
gl = ctx.obj['api']
prj = get_project_by_path(gl, path)
if prj is None:
click.echo('Unable to find a matching project for path %r' % path, err=True)
return
try:
if not gl.delete(prj):
            raise click.UsageError('Unable to delete project for path %r' % path)
except gitlab.GitlabGetError:
click.echo('The project %r seems to be already deleted' % path, err=True)
return
if sync:
with click.progressbar(range(12), label='Waiting for deletion...') as waiting:
for step in waiting:
try:
gl.projects.get(prj.id)
except gitlab.GitlabGetError:
deleted = True
break
time.sleep(10)
else:
deleted = False
if deleted:
click.echo('Project %r deleted' % path)
else:
            raise click.UsageError('Timeout waiting for %r deletion' % path)
else:
click.echo('Project %r submitted for deletion' % path)
def main():
cli(obj={})
if __name__ == '__main__':
main()
|
|
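Two reusable shapes sit in the record above: page-until-empty iteration over a listing API, and a bounded poll loop that waits for a deletion to become observable. A standalone sketch under assumed call signatures; `fetch_page` and `probe` are hypothetical callables standing in for the python-gitlab calls:
import time
def iter_pages(fetch_page, per_page=10):
    # Yield items page by page until the API returns an empty page.
    page = 0
    while True:
        items = fetch_page(page=page, per_page=per_page)
        if not items:
            break
        page += 1
        for item in items:
            yield item
def wait_until_gone(probe, gone_exc, attempts=12, delay=10):
    # Poll `probe` until it raises `gone_exc` (resource no longer found)
    # or the attempts are exhausted; True means the deletion was observed.
    for _ in range(attempts):
        try:
            probe()
        except gone_exc:
            return True
        time.sleep(delay)
    return False
The linear scan in get_project_by_path is O(total projects); if the server supports a search parameter, filtering server-side would be cheaper, but the generator above matches what the record actually does.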
4b59c70eb8ea0610f50217a2d9ffaa439be88041
|
migrations/versions/144_update_calendar_index.py
|
migrations/versions/144_update_calendar_index.py
|
"""Update Calendar index.
Revision ID: 1c73ca99c03b
Revises: 1d7a72222b7c
Create Date: 2015-02-26 00:50:52.322510
"""
# revision identifiers, used by Alembic.
revision = '1c73ca99c03b'
down_revision = '1d7a72222b7c'
from alembic import op
def upgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name', 'uid'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
def downgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
|
Revert "forgot to remove migration"
|
Revert "forgot to remove migration"
This reverts commit 41c9fa093909eced94c18acd03a68a3843fb359f.
Reason for rollback: Assuming original commit was a mistake.
|
Python
|
agpl-3.0
|
Eagles2F/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,Eagles2F/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,jobscore/sync-engine,closeio/nylas,ErinCall/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,gale320/sync-engine,nylas/sync-engine,nylas/sync-engine,jobscore/sync-engine,gale320/sync-engine,closeio/nylas,nylas/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,Eagles2F/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine
|
Revert "forgot to remove migration"
This reverts commit 41c9fa093909eced94c18acd03a68a3843fb359f.
Reason for rollback: Assuming original commit was a mistake.
|
"""Update Calendar index.
Revision ID: 1c73ca99c03b
Revises: 1d7a72222b7c
Create Date: 2015-02-26 00:50:52.322510
"""
# revision identifiers, used by Alembic.
revision = '1c73ca99c03b'
down_revision = '1d7a72222b7c'
from alembic import op
def upgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name', 'uid'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
def downgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
|
<commit_before><commit_msg>Revert "forgot to remove migration"
This reverts commit 41c9fa093909eced94c18acd03a68a3843fb359f.
Reason for rollback: Assuming original commit was a mistake.<commit_after>
|
"""Update Calendar index.
Revision ID: 1c73ca99c03b
Revises: 1d7a72222b7c
Create Date: 2015-02-26 00:50:52.322510
"""
# revision identifiers, used by Alembic.
revision = '1c73ca99c03b'
down_revision = '1d7a72222b7c'
from alembic import op
def upgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name', 'uid'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
def downgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
|
Revert "forgot to remove migration"
This reverts commit 41c9fa093909eced94c18acd03a68a3843fb359f.
Reason for rollback: Assuming original commit was a mistake."""Update Calendar index.
Revision ID: 1c73ca99c03b
Revises: 1d7a72222b7c
Create Date: 2015-02-26 00:50:52.322510
"""
# revision identifiers, used by Alembic.
revision = '1c73ca99c03b'
down_revision = '1d7a72222b7c'
from alembic import op
def upgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name', 'uid'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
def downgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
|
<commit_before><commit_msg>Revert "forgot to remove migration"
This reverts commit 41c9fa093909eced94c18acd03a68a3843fb359f.
Reason for rollback: Assuming original commit was a mistake.<commit_after>"""Update Calendar index.
Revision ID: 1c73ca99c03b
Revises: 1d7a72222b7c
Create Date: 2015-02-26 00:50:52.322510
"""
# revision identifiers, used by Alembic.
revision = '1c73ca99c03b'
down_revision = '1d7a72222b7c'
from alembic import op
def upgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name', 'uid'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
def downgrade():
op.drop_constraint('calendar_ibfk_1', 'calendar', type_='foreignkey')
op.drop_constraint('uuid', 'calendar', type_='unique')
op.create_index('uuid', 'calendar',
['namespace_id', 'provider_name', 'name'], unique=True)
op.create_foreign_key('calendar_ibfk_1',
'calendar', 'namespace',
['namespace_id'], ['id'])
|
|
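The revision above performs a constraint dance that MySQL forces on you: an index that backs a foreign key cannot be dropped while the key exists, so the migration drops the FK, rebuilds the unique index with the extra `uid` column, then restores the FK. A generalized, hedged sketch of the same ordering; the helper and its argument names are illustrative, not sync-engine code:
from alembic import op
def swap_unique_index(table, ref_table, fk_name, index_name, new_cols,
                      local_col='namespace_id'):
    # Drop the FK first so the index behind it can be dropped.
    op.drop_constraint(fk_name, table, type_='foreignkey')
    op.drop_constraint(index_name, table, type_='unique')
    op.create_index(index_name, table, new_cols, unique=True)
    # Restore the FK once the rebuilt unique index is in place.
    op.create_foreign_key(fk_name, table, ref_table, [local_col], ['id'])
The downgrade is the same helper with the old column list, which is why the record's upgrade() and downgrade() bodies are mirror images.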
3a7611f6bebc592c1999af996b96d9a812c33ad5
|
imap_cli/tests/test_config.py
|
imap_cli/tests/test_config.py
|
# -*- coding: utf-8 -*-
"""Test config"""
import unittest
from imap_cli import config
class HelpersTest(unittest.TestCase):
def setUp(self):
self.config_filename = 'config-example.ini'
def test_config_file(self):
self.ctx = config.new_context_from_file(self.config_filename)
assert self.ctx.hostname == 'imap.example.org'
assert self.ctx.username == 'username'
assert self.ctx.password == 'secret'
assert self.ctx.ssl is True
assert self.ctx.limit == 10
assert self.ctx.format_status == ("\n",
"ID: {mail_id}\n",
"Flags: {flags}\n",
"From: {mail_from}\n",
"To: {to}\n",
"Date: {date}\n",
"Subjetc: {subject}",
)
assert self.ctx.format_status == "{directory:>20} : {count:>5} Mails - {unseen:>5} Unseen - {recent:>5} Recent"
|
Add basic test for config module
|
Add basic test for config module
|
Python
|
mit
|
Gentux/imap-cli,Gentux/imap-cli
|
Add basic test for config module
|
# -*- coding: utf-8 -*-
"""Test config"""
import unittest
from imap_cli import config
class HelpersTest(unittest.TestCase):
def setUp(self):
self.config_filename = 'config-example.ini'
def test_config_file(self):
self.ctx = config.new_context_from_file(self.config_filename)
assert self.ctx.hostname == 'imap.example.org'
assert self.ctx.username == 'username'
assert self.ctx.password == 'secret'
assert self.ctx.ssl is True
assert self.ctx.limit == 10
assert self.ctx.format_status == ("\n",
"ID: {mail_id}\n",
"Flags: {flags}\n",
"From: {mail_from}\n",
"To: {to}\n",
"Date: {date}\n",
"Subjetc: {subject}",
)
assert self.ctx.format_status == "{directory:>20} : {count:>5} Mails - {unseen:>5} Unseen - {recent:>5} Recent"
|
<commit_before><commit_msg>Add basic test for config module<commit_after>
|
# -*- coding: utf-8 -*-
"""Test config"""
import unittest
from imap_cli import config
class HelpersTest(unittest.TestCase):
def setUp(self):
self.config_filename = 'config-example.ini'
def test_config_file(self):
self.ctx = config.new_context_from_file(self.config_filename)
assert self.ctx.hostname == 'imap.example.org'
assert self.ctx.username == 'username'
assert self.ctx.password == 'secret'
assert self.ctx.ssl is True
assert self.ctx.limit == 10
assert self.ctx.format_status == ("\n",
"ID: {mail_id}\n",
"Flags: {flags}\n",
"From: {mail_from}\n",
"To: {to}\n",
"Date: {date}\n",
"Subjetc: {subject}",
)
assert self.ctx.format_status == "{directory:>20} : {count:>5} Mails - {unseen:>5} Unseen - {recent:>5} Recent"
|
Add basic test for config module# -*- coding: utf-8 -*-
"""Test config"""
import unittest
from imap_cli import config
class HelpersTest(unittest.TestCase):
def setUp(self):
self.config_filename = 'config-example.ini'
def test_config_file(self):
self.ctx = config.new_context_from_file(self.config_filename)
assert self.ctx.hostname == 'imap.example.org'
assert self.ctx.username == 'username'
assert self.ctx.password == 'secret'
assert self.ctx.ssl is True
assert self.ctx.limit == 10
assert self.ctx.format_status == ("\n",
"ID: {mail_id}\n",
"Flags: {flags}\n",
"From: {mail_from}\n",
"To: {to}\n",
"Date: {date}\n",
"Subjetc: {subject}",
)
assert self.ctx.format_status == "{directory:>20} : {count:>5} Mails - {unseen:>5} Unseen - {recent:>5} Recent"
|
<commit_before><commit_msg>Add basic test for config module<commit_after># -*- coding: utf-8 -*-
"""Test config"""
import unittest
from imap_cli import config
class HelpersTest(unittest.TestCase):
def setUp(self):
self.config_filename = 'config-example.ini'
def test_config_file(self):
self.ctx = config.new_context_from_file(self.config_filename)
assert self.ctx.hostname == 'imap.example.org'
assert self.ctx.username == 'username'
assert self.ctx.password == 'secret'
assert self.ctx.ssl is True
assert self.ctx.limit == 10
assert self.ctx.format_status == ("\n",
"ID: {mail_id}\n",
"Flags: {flags}\n",
"From: {mail_from}\n",
"To: {to}\n",
"Date: {date}\n",
"Subjetc: {subject}",
)
assert self.ctx.format_status == "{directory:>20} : {count:>5} Mails - {unseen:>5} Unseen - {recent:>5} Recent"
|
|
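The test above implies a loader that parses an ini file into an attribute-bearing context with typed fields (booleans, integers, format strings). A hypothetical Python 3 sketch of such a loader; the section and option names are assumptions, not imap-cli's actual layout:
import configparser
class Context(object):
    pass
def new_context_from_file(filename):
    parser = configparser.ConfigParser()
    parser.read(filename)
    ctx = Context()
    ctx.hostname = parser.get('imap', 'hostname')
    ctx.username = parser.get('imap', 'username')
    ctx.password = parser.get('imap', 'password')
    ctx.ssl = parser.getboolean('imap', 'ssl')    # typed accessor, not a raw string
    ctx.limit = parser.getint('display', 'limit')
    ctx.format_status = parser.get('display', 'format_status', raw=True)
    return ctx
raw=True keeps ConfigParser's %-interpolation from rewriting the stored format templates.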
2d342bba72637dcc3191aa2eab34b824e20fe10a
|
FEMur/solver2D.py
|
FEMur/solver2D.py
|
from FEMur import *
import sympy as sy
import numpy as np
import scipy as sc
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import scipy.interpolate
from math import ceil
class Solver(object):
'''
2-dimensional solver top class.
Provides common initialization to all child solver classes.
'''
def __init__(self):
pass
class SteadyHeatSolver(Solver):
'''
2-dimensional steady state heat transfer solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
class SteadyStructureSolver(Solver):
'''
2-dimensional steady state structure solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
|
Add skeleton of all 2D solver classes
|
Add skeleton of all 2D solver classes
3 new classes have been added.
Solver class - Top class for all solver child classes
SteadyHeatSolver class - Steady state heat transfer solver
SteadyStructureSolver class - Steady state structure solver
|
Python
|
mit
|
MrJarv1s/FEMur
|
Add skeleton of all 2D solver classes
3 new classes have been added.
Solver class - Top class for all solver child classes
SteadyHeatSolver class - Steady state heat transfer solver
SteadyStructureSolver class - Steady state structure solver
|
from FEMur import *
import sympy as sy
import numpy as np
import scipy as sc
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import scipy.interpolate
from math import ceil
class Solver(object):
'''
2-dimensional solver top class.
Provides common initialization to all child solver classes.
'''
def __init__(self):
pass
class SteadyHeatSolver(Solver):
'''
2-dimensional steady state heat transfer solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
class SteadyStructureSolver(Solver):
'''
2-dimensional steady state structure solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
|
<commit_before><commit_msg>Add skeleton of all 2D solver classes
3 new classes have been added.
Solver class - Top class for all solver child classes
SteadyHeatSolver class - Steady state heat transfer solver
SteadyStructureSolver class - Steady state structure solver<commit_after>
|
from FEMur import *
import sympy as sy
import numpy as np
import scipy as sc
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import scipy.interpolate
from math import ceil
class Solver(object):
'''
2-dimensional solver top class.
Provides common initialization to all child solver classes.
'''
def __init__(self):
pass
class SteadyHeatSolver(Solver):
'''
2-dimensional steady state heat transfer solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
class SteadyStructureSolver(Solver):
'''
2-dimensional steady state structure solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
|
Add skeleton of all 2D solver classes
3 new classes have been added.
Solver class - Top class for all solver child classes
SteadyHeatSolver class - Steady state heat transfer solver
SteadyStructureSolver class - Steady state structure solverfrom FEMur import *
import sympy as sy
import numpy as np
import scipy as sc
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import scipy.interpolate
from math import ceil
class Solver(object):
'''
2-dimensional solver top class.
Provides common initialization to all child solver classes.
'''
def __init__(self):
pass
class SteadyHeatSolver(Solver):
'''
2-dimensional steady state heat transfer solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
class SteadyStructureSolver(Solver):
'''
2-dimensional steady state structure solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
|
<commit_before><commit_msg>Add skeleton of all 2D solver classes
3 new classes have been added.
Solver class - Top class for all solver child classes
SteadyHeatSolver class - Steady state heat transfer solver
SteadyStructureSolver class - Steady state structure solver<commit_after>from FEMur import *
import sympy as sy
import numpy as np
import scipy as sc
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import scipy.interpolate
from math import ceil
class Solver(object):
'''
2-dimensional solver top class.
Provides common initialization to all child solver classes.
'''
def __init__(self):
pass
class SteadyHeatSolver(Solver):
'''
2-dimensional steady state heat transfer solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
class SteadyStructureSolver(Solver):
'''
2-dimensional steady state structure solver.
'''
def __init__(self):
        Solver.__init__(self)
pass
|
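A closing note on the skeleton above: calling the unbound `Solver.__init__()` without an instance raises a TypeError, which is why the copies here pass `self` explicitly. The more idiomatic route in new code is cooperative initialization via super(); a minimal sketch (the `mesh` attribute is illustrative only):
class Solver(object):
    def __init__(self):
        # Shared state every solver subclass inherits.
        self.mesh = None
class SteadyHeatSolver(Solver):
    def __init__(self):
        # Equivalent to Solver.__init__(self), but robust to base-class changes.
        super(SteadyHeatSolver, self).__init__()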