Dataset schema (column: type, observed length range):

- commit: string, length 40
- old_file: string, length 4-118
- new_file: string, length 4-118
- old_contents: string, length 0-2.94k
- new_contents: string, length 1-4.43k
- subject: string, length 15-444
- message: string, length 16-3.45k
- lang: string class (1 value)
- license: string class (13 values)
- repos: string, length 5-43.2k
- prompt: string, length 17-4.58k
- response: string, length 1-4.43k
- prompt_tagged: string, length 58-4.62k
- response_tagged: string, length 1-4.43k
- text: string, length 132-7.29k
- text_tagged: string, length 173-7.33k

Records follow, flattened one field per block and separated by `|` lines.
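A minimal sketch of loading and inspecting a dataset with this schema via the Hugging Face `datasets` library. The dataset's Hub identifier is not given in this dump, so `USER/commits-dataset` below is a placeholder:

```python
from datasets import load_dataset

# Placeholder identifier: substitute the actual Hub path of this dataset.
ds = load_dataset("USER/commits-dataset", split="train")

row = ds[0]
print(row["subject"])    # commit subject line
print(row["new_file"])   # path of the file the commit touches
# The tagged variants wrap fields in <commit_before>/<commit_msg>/<commit_after> markers:
print(row["text_tagged"][:120])
```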
c187f77d3cc05f35613f3355f1df56e11d012463
|
bluebottle/payments_docdata/tests/test_gateway.py
|
bluebottle/payments_docdata/tests/test_gateway.py
|
from bluebottle.payments_docdata.gateway import DocdataClient, Amount, Shopper, Name, Destination, Address, Merchant
from bunch import bunchify
from mock import patch, Mock
from bluebottle.test.utils import BluebottleTestCase
class DocdataClientMock():
class service():
@staticmethod
def create(*args, **kwargs):
return bunchify({
'createSuccess': {
'key': 'HAZZAHAZZA'
}
})
class factory:
@staticmethod
def create(ns):
return Mock()
@patch('bluebottle.payments_docdata.gateway.Client', return_value=DocdataClientMock())
class DocdataGatewayTestCase(BluebottleTestCase):
def test_create(self, mock_client):
credentials = {
'merchant_name': 'test',
'merchant_password': 'top-secret',
}
self.gateway = DocdataClient(credentials)
merchant = Merchant('test', 'top-secret')
amount = Amount(35, 'EUR')
name1 = Name('Henk', 'Wijngaarden')
shopper = Shopper(12, name1, 'henk@truck.nl', 'en')
name2 = Name('Plat', 'Form')
address = Address('s Gravenhekje', '1', 'A', '1011TG', 'Amsterdam', 'NH', 'NL')
bill_to = Destination(name2, address)
result = self.gateway.create(
merchant=merchant,
payment_id='123',
total_gross_amount=amount,
shopper=shopper,
bill_to=bill_to,
description='Donation',
receiptText='Thanks'
)
self.assertEqual(result, {'order_id': '123-1', 'order_key': 'HAZZAHAZZA'})
|
Add test for Docdata gateway
|
Add test for Docdata gateway
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add test for Docdata gateway
|
from bluebottle.payments_docdata.gateway import DocdataClient, Amount, Shopper, Name, Destination, Address, Merchant
from bunch import bunchify
from mock import patch, Mock
from bluebottle.test.utils import BluebottleTestCase
class DocdataClientMock():
class service():
@staticmethod
def create(*args, **kwargs):
return bunchify({
'createSuccess': {
'key': 'HAZZAHAZZA'
}
})
class factory:
@staticmethod
def create(ns):
return Mock()
@patch('bluebottle.payments_docdata.gateway.Client', return_value=DocdataClientMock())
class DocdataGatewayTestCase(BluebottleTestCase):
def test_create(self, mock_client):
credentials = {
'merchant_name': 'test',
'merchant_password': 'top-secret',
}
self.gateway = DocdataClient(credentials)
merchant = Merchant('test', 'top-secret')
amount = Amount(35, 'EUR')
name1 = Name('Henk', 'Wijngaarden')
shopper = Shopper(12, name1, 'henk@truck.nl', 'en')
name2 = Name('Plat', 'Form')
address = Address('s Gravenhekje', '1', 'A', '1011TG', 'Amsterdam', 'NH', 'NL')
bill_to = Destination(name2, address)
result = self.gateway.create(
merchant=merchant,
payment_id='123',
total_gross_amount=amount,
shopper=shopper,
bill_to=bill_to,
description='Donation',
receiptText='Thanks'
)
self.assertEqual(result, {'order_id': '123-1', 'order_key': 'HAZZAHAZZA'})
|
<commit_before><commit_msg>Add test for Docdata gateway<commit_after>
|
from bluebottle.payments_docdata.gateway import DocdataClient, Amount, Shopper, Name, Destination, Address, Merchant
from bunch import bunchify
from mock import patch, Mock
from bluebottle.test.utils import BluebottleTestCase
class DocdataClientMock():
class service():
@staticmethod
def create(*args, **kwargs):
return bunchify({
'createSuccess': {
'key': 'HAZZAHAZZA'
}
})
class factory:
@staticmethod
def create(ns):
return Mock()
@patch('bluebottle.payments_docdata.gateway.Client', return_value=DocdataClientMock())
class DocdataGatewayTestCase(BluebottleTestCase):
def test_create(self, mock_client):
credentials = {
'merchant_name': 'test',
'merchant_password': 'top-secret',
}
self.gateway = DocdataClient(credentials)
merchant = Merchant('test', 'top-secret')
amount = Amount(35, 'EUR')
name1 = Name('Henk', 'Wijngaarden')
shopper = Shopper(12, name1, 'henk@truck.nl', 'en')
name2 = Name('Plat', 'Form')
address = Address('s Gravenhekje', '1', 'A', '1011TG', 'Amsterdam', 'NH', 'NL')
bill_to = Destination(name2, address)
result = self.gateway.create(
merchant=merchant,
payment_id='123',
total_gross_amount=amount,
shopper=shopper,
bill_to=bill_to,
description='Donation',
receiptText='Thanks'
)
self.assertEqual(result, {'order_id': '123-1', 'order_key': 'HAZZAHAZZA'})
|
Add test for Docdata gatewayfrom bluebottle.payments_docdata.gateway import DocdataClient, Amount, Shopper, Name, Destination, Address, Merchant
from bunch import bunchify
from mock import patch, Mock
from bluebottle.test.utils import BluebottleTestCase
class DocdataClientMock():
class service():
@staticmethod
def create(*args, **kwargs):
return bunchify({
'createSuccess': {
'key': 'HAZZAHAZZA'
}
})
class factory:
@staticmethod
def create(ns):
return Mock()
@patch('bluebottle.payments_docdata.gateway.Client', return_value=DocdataClientMock())
class DocdataGatewayTestCase(BluebottleTestCase):
def test_create(self, mock_client):
credentials = {
'merchant_name': 'test',
'merchant_password': 'top-secret',
}
self.gateway = DocdataClient(credentials)
merchant = Merchant('test', 'top-secret')
amount = Amount(35, 'EUR')
name1 = Name('Henk', 'Wijngaarden')
shopper = Shopper(12, name1, 'henk@truck.nl', 'en')
name2 = Name('Plat', 'Form')
address = Address('s Gravenhekje', '1', 'A', '1011TG', 'Amsterdam', 'NH', 'NL')
bill_to = Destination(name2, address)
result = self.gateway.create(
merchant=merchant,
payment_id='123',
total_gross_amount=amount,
shopper=shopper,
bill_to=bill_to,
description='Donation',
receiptText='Thanks'
)
self.assertEqual(result, {'order_id': '123-1', 'order_key': 'HAZZAHAZZA'})
|
<commit_before><commit_msg>Add test for Docdata gateway<commit_after>from bluebottle.payments_docdata.gateway import DocdataClient, Amount, Shopper, Name, Destination, Address, Merchant
from bunch import bunchify
from mock import patch, Mock
from bluebottle.test.utils import BluebottleTestCase
class DocdataClientMock():
class service():
@staticmethod
def create(*args, **kwargs):
return bunchify({
'createSuccess': {
'key': 'HAZZAHAZZA'
}
})
class factory:
@staticmethod
def create(ns):
return Mock()
@patch('bluebottle.payments_docdata.gateway.Client', return_value=DocdataClientMock())
class DocdataGatewayTestCase(BluebottleTestCase):
def test_create(self, mock_client):
credentials = {
'merchant_name': 'test',
'merchant_password': 'top-secret',
}
self.gateway = DocdataClient(credentials)
merchant = Merchant('test', 'top-secret')
amount = Amount(35, 'EUR')
name1 = Name('Henk', 'Wijngaarden')
shopper = Shopper(12, name1, 'henk@truck.nl', 'en')
name2 = Name('Plat', 'Form')
address = Address('s Gravenhekje', '1', 'A', '1011TG', 'Amsterdam', 'NH', 'NL')
bill_to = Destination(name2, address)
result = self.gateway.create(
merchant=merchant,
payment_id='123',
total_gross_amount=amount,
shopper=shopper,
bill_to=bill_to,
description='Donation',
receiptText='Thanks'
)
self.assertEqual(result, {'order_id': '123-1', 'order_key': 'HAZZAHAZZA'})
|
|
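The test above stubs the SOAP layer rather than calling Docdata: `@patch` replaces the `Client` used by the gateway with `DocdataClientMock`, and `bunchify` wraps the canned reply dict so it supports attribute access (`reply.createSuccess.key`) the way a real suds response would. A stripped-down illustration of the same pattern, independent of bluebottle; the names here are hypothetical, and `types.SimpleNamespace` stands in for `bunchify`:

```python
from types import SimpleNamespace

class FakeService:
    """Stands in for a remote SOAP endpoint; returns a canned reply."""
    @staticmethod
    def create(*args, **kwargs):
        # SimpleNamespace gives the dict-like data attribute access,
        # which is the role bunchify plays in the test above.
        return SimpleNamespace(createSuccess=SimpleNamespace(key='HAZZAHAZZA'))

class FakeClient:
    service = FakeService

def order_key(client):
    # Hypothetical caller reading the reply the way the gateway does.
    return client.service.create().createSuccess.key

assert order_key(FakeClient()) == 'HAZZAHAZZA'
```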
faf9dfd9f1b1218ad58c281734b2af4074d0fd1f
|
tests/rules_tests/grammarManipulation_tests/InvalidAddTest.py
|
tests/rules_tests/grammarManipulation_tests/InvalidAddTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Rule as _R, Grammar, Nonterminal as _N
class InvalidAddTest(TestCase):
pass
if __name__ == '__main__':
main()
|
Add file for tests of rule invalid add
|
Add file for tests of rule invalid add
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add file for tests of rule invalid add
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Rule as _R, Grammar, Nonterminal as _N
class InvalidAddTest(TestCase):
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add file for tests of rule invalid add<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Rule as _R, Grammar, Nonterminal as _N
class InvalidAddTest(TestCase):
pass
if __name__ == '__main__':
main()
|
Add file for tests of rule invalid add#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Rule as _R, Grammar, Nonterminal as _N
class InvalidAddTest(TestCase):
pass
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add file for tests of rule invalid add<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Rule as _R, Grammar, Nonterminal as _N
class InvalidAddTest(TestCase):
pass
if __name__ == '__main__':
main()
|
|
79facb190017c156915c24d1ca6bd10c6b2021bb
|
apps/accounts/management/commands/fix_import_ggm.py
|
apps/accounts/management/commands/fix_import_ggm.py
|
import MySQLdb
from django.core.management.base import NoArgsCommand, BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from apps.reminder.models import UserReminderInfo
from apps.survey.models import SurveyUser
class Command(NoArgsCommand):
@transaction.commit_on_success
def handle_noargs(self, **options):
c = MySQLdb.connect(host="localhost", user="root", passwd="", db="ggm_existing_tmp", charset='utf8')
cursor = c.cursor ()
cursor.execute ("SELECT * FROM `meter` WHERE laatste_mail >= 117 AND wil_herinnering = '1'")
rows = cursor.fetchall()
for i, row in enumerate(rows):
meter_id, UUID, email, naam, postcode, geb_datum, geslacht, password, _pw_new, laatste_meting, laatste_mail, herinnering, reken_postcode, wil_herinnering, stress = row
print i, email,
u = authenticate(username=email[:30], password=password)
if not u:
print "SKIPPING"
continue
u.set_password(_pw_new)
u.save()
print
cursor.close ()
c.close ()
|
Fix on ggm user import script
|
Fix on ggm user import script
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website
|
Fix on ggm user import script
|
import MySQLdb
from django.core.management.base import NoArgsCommand, BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from apps.reminder.models import UserReminderInfo
from apps.survey.models import SurveyUser
class Command(NoArgsCommand):
@transaction.commit_on_success
def handle_noargs(self, **options):
c = MySQLdb.connect(host="localhost", user="root", passwd="", db="ggm_existing_tmp", charset='utf8')
cursor = c.cursor ()
cursor.execute ("SELECT * FROM `meter` WHERE laatste_mail >= 117 AND wil_herinnering = '1'")
rows = cursor.fetchall()
for i, row in enumerate(rows):
meter_id, UUID, email, naam, postcode, geb_datum, geslacht, password, _pw_new, laatste_meting, laatste_mail, herinnering, reken_postcode, wil_herinnering, stress = row
print i, email,
u = authenticate(username=email[:30], password=password)
if not u:
print "SKIPPING"
continue
u.set_password(_pw_new)
u.save()
print
cursor.close ()
c.close ()
|
<commit_before><commit_msg>Fix on ggm user import script<commit_after>
|
import MySQLdb
from django.core.management.base import NoArgsCommand, BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from apps.reminder.models import UserReminderInfo
from apps.survey.models import SurveyUser
class Command(NoArgsCommand):
@transaction.commit_on_success
def handle_noargs(self, **options):
c = MySQLdb.connect(host="localhost", user="root", passwd="", db="ggm_existing_tmp", charset='utf8')
cursor = c.cursor ()
cursor.execute ("SELECT * FROM `meter` WHERE laatste_mail >= 117 AND wil_herinnering = '1'")
rows = cursor.fetchall()
for i, row in enumerate(rows):
meter_id, UUID, email, naam, postcode, geb_datum, geslacht, password, _pw_new, laatste_meting, laatste_mail, herinnering, reken_postcode, wil_herinnering, stress = row
print i, email,
u = authenticate(username=email[:30], password=password)
if not u:
print "SKIPPING"
continue
u.set_password(_pw_new)
u.save()
print
cursor.close ()
c.close ()
|
Fix on ggm user import scriptimport MySQLdb
from django.core.management.base import NoArgsCommand, BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from apps.reminder.models import UserReminderInfo
from apps.survey.models import SurveyUser
class Command(NoArgsCommand):
@transaction.commit_on_success
def handle_noargs(self, **options):
c = MySQLdb.connect(host="localhost", user="root", passwd="", db="ggm_existing_tmp", charset='utf8')
cursor = c.cursor ()
cursor.execute ("SELECT * FROM `meter` WHERE laatste_mail >= 117 AND wil_herinnering = '1'")
rows = cursor.fetchall()
for i, row in enumerate(rows):
meter_id, UUID, email, naam, postcode, geb_datum, geslacht, password, _pw_new, laatste_meting, laatste_mail, herinnering, reken_postcode, wil_herinnering, stress = row
print i, email,
u = authenticate(username=email[:30], password=password)
if not u:
print "SKIPPING"
continue
u.set_password(_pw_new)
u.save()
print
cursor.close ()
c.close ()
|
<commit_before><commit_msg>Fix on ggm user import script<commit_after>import MySQLdb
from django.core.management.base import NoArgsCommand, BaseCommand
from django.db import transaction
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from apps.reminder.models import UserReminderInfo
from apps.survey.models import SurveyUser
class Command(NoArgsCommand):
@transaction.commit_on_success
def handle_noargs(self, **options):
c = MySQLdb.connect(host="localhost", user="root", passwd="", db="ggm_existing_tmp", charset='utf8')
cursor = c.cursor ()
cursor.execute ("SELECT * FROM `meter` WHERE laatste_mail >= 117 AND wil_herinnering = '1'")
rows = cursor.fetchall()
for i, row in enumerate(rows):
meter_id, UUID, email, naam, postcode, geb_datum, geslacht, password, _pw_new, laatste_meting, laatste_mail, herinnering, reken_postcode, wil_herinnering, stress = row
print i, email,
u = authenticate(username=email[:30], password=password)
if not u:
print "SKIPPING"
continue
u.set_password(_pw_new)
u.save()
print
cursor.close ()
c.close ()
|
|
7fb3df28b9fc9222e44ae48d8b2fd3c2a6b9fbfa
|
powerline/ext/vim/__init__.py
|
powerline/ext/vim/__init__.py
|
# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
vim.command('source ' + vim.eval('fnameescape("' + os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim') + '")'))
|
# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
from bindings import vim_get_func
fnameescape = vim_get_func('fnameescape')
vim.command('source ' + fnameescape(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim')))
|
Change code to use vim_get_func('fnameescape')
|
Change code to use vim_get_func('fnameescape')
Previous version had problems with paths containing backslashes and/or
double quotes.
|
Python
|
mit
|
junix/powerline,lukw00/powerline,S0lll0s/powerline,kenrachynski/powerline,EricSB/powerline,bezhermoso/powerline,bezhermoso/powerline,lukw00/powerline,prvnkumar/powerline,EricSB/powerline,blindFS/powerline,EricSB/powerline,xxxhycl2010/powerline,magus424/powerline,xfumihiro/powerline,dragon788/powerline,prvnkumar/powerline,areteix/powerline,russellb/powerline,xxxhycl2010/powerline,junix/powerline,s0undt3ch/powerline,wfscheper/powerline,dragon788/powerline,QuLogic/powerline,blindFS/powerline,darac/powerline,Liangjianghao/powerline,kenrachynski/powerline,IvanAli/powerline,bezhermoso/powerline,areteix/powerline,prvnkumar/powerline,junix/powerline,darac/powerline,bartvm/powerline,keelerm84/powerline,seanfisk/powerline,QuLogic/powerline,cyrixhero/powerline,seanfisk/powerline,kenrachynski/powerline,dragon788/powerline,QuLogic/powerline,wfscheper/powerline,areteix/powerline,firebitsbr/powerline,xfumihiro/powerline,Luffin/powerline,firebitsbr/powerline,Luffin/powerline,russellb/powerline,Liangjianghao/powerline,cyrixhero/powerline,DoctorJellyface/powerline,s0undt3ch/powerline,Luffin/powerline,s0undt3ch/powerline,S0lll0s/powerline,S0lll0s/powerline,IvanAli/powerline,seanfisk/powerline,blindFS/powerline,cyrixhero/powerline,bartvm/powerline,Liangjianghao/powerline,darac/powerline,keelerm84/powerline,bartvm/powerline,xxxhycl2010/powerline,wfscheper/powerline,DoctorJellyface/powerline,xfumihiro/powerline,lukw00/powerline,magus424/powerline,russellb/powerline,IvanAli/powerline,firebitsbr/powerline,magus424/powerline,DoctorJellyface/powerline
|
# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
vim.command('source ' + vim.eval('fnameescape("' + os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim') + '")'))
Change code to use vim_get_func('fnameescape')
Previous version had problems with paths containing backslashes and/or
double quotes.
|
# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
from bindings import vim_get_func
fnameescape = vim_get_func('fnameescape')
vim.command('source ' + fnameescape(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim')))
|
<commit_before># -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
vim.command('source ' + vim.eval('fnameescape("' + os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim') + '")'))
<commit_msg>Change code to use vim_get_func('fnameescape')
Previous version had problems with paths containing backslashes and/or
double quotes.<commit_after>
|
# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
from bindings import vim_get_func
fnameescape = vim_get_func('fnameescape')
vim.command('source ' + fnameescape(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim')))
|
# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
vim.command('source ' + vim.eval('fnameescape("' + os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim') + '")'))
Change code to use vim_get_func('fnameescape')
Previous version had problems with paths containing backslashes and/or
double quotes.# -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
from bindings import vim_get_func
fnameescape = vim_get_func('fnameescape')
vim.command('source ' + fnameescape(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim')))
|
<commit_before># -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
vim.command('source ' + vim.eval('fnameescape("' + os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim') + '")'))
<commit_msg>Change code to use vim_get_func('fnameescape')
Previous version had problems with paths containing backslashes and/or
double quotes.<commit_after># -*- coding: utf-8 -*-
def source_plugin():
import os
import vim
from bindings import vim_get_func
fnameescape = vim_get_func('fnameescape')
vim.command('source ' + fnameescape(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'powerline.vim')))
|
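The commit message above describes a real quoting hazard: splicing a raw path into a double-quoted Vim expression breaks as soon as the path contains a double quote or a backslash, because those characters terminate or escape the quoted string before `fnameescape` ever sees it. A plain-Python sketch of the failure mode (no Vim needed; the path is hypothetical):

```python
import os

path = os.path.join('/home/user', 'odd "dir"', 'powerline.vim')

# Old construction: build the Vim expression by string concatenation.
vim_expr = 'fnameescape("' + path + '")'
print(vim_expr)
# fnameescape("/home/user/odd "dir"/powerline.vim")
# The embedded quotes close the Vim string early, so the expression is
# malformed. The replacement passes the path as a real argument through
# vim_get_func's callable binding, so no manual quoting is involved.
```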
e1d3befa79e30846dc905eb7225f51e9e374f21a
|
psyrun/tests/test_splitter.py
|
psyrun/tests/test_splitter.py
|
import os
import os.path
import pytest
from psyrun.core import load_infile, load_results, save_outfile
from psyrun.pspace import Param
from psyrun.split import Splitter
@pytest.mark.parametrize(
'pspace_size,max_splits,min_items,n_splits', [
(7, 4, 4, 2), (8, 4, 4, 2), (9, 4, 4, 3),
(15, 4, 4, 4), (16, 4, 4, 4), (17, 4, 4, 4),
(15, 2, 4, 2), (16, 4, 16, 1)
])
class TestSplitter(object):
def test_n_splits(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
assert splitter.n_splits == n_splits
assert len(list(splitter.iter_in_out_files())) == n_splits
def test_split_merge_roundtrip(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
splitter.split()
for filename in os.listdir(splitter.indir):
infile = os.path.join(splitter.indir, filename)
outfile = os.path.join(splitter.outdir, filename)
save_outfile(load_infile(infile), outfile)
result_file = os.path.join(str(tmpdir), 'result.h5')
Splitter.merge(str(tmpdir), result_file)
result = load_results(result_file)
assert sorted(result['x']) == sorted(range(pspace_size))
|
Add unit test for Splitter.
|
Add unit test for Splitter.
|
Python
|
mit
|
jgosmann/psyrun
|
Add unit test for Splitter.
|
import os
import os.path
import pytest
from psyrun.core import load_infile, load_results, save_outfile
from psyrun.pspace import Param
from psyrun.split import Splitter
@pytest.mark.parametrize(
'pspace_size,max_splits,min_items,n_splits', [
(7, 4, 4, 2), (8, 4, 4, 2), (9, 4, 4, 3),
(15, 4, 4, 4), (16, 4, 4, 4), (17, 4, 4, 4),
(15, 2, 4, 2), (16, 4, 16, 1)
])
class TestSplitter(object):
def test_n_splits(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
assert splitter.n_splits == n_splits
assert len(list(splitter.iter_in_out_files())) == n_splits
def test_split_merge_roundtrip(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
splitter.split()
for filename in os.listdir(splitter.indir):
infile = os.path.join(splitter.indir, filename)
outfile = os.path.join(splitter.outdir, filename)
save_outfile(load_infile(infile), outfile)
result_file = os.path.join(str(tmpdir), 'result.h5')
Splitter.merge(str(tmpdir), result_file)
result = load_results(result_file)
assert sorted(result['x']) == sorted(range(pspace_size))
|
<commit_before><commit_msg>Add unit test for Splitter.<commit_after>
|
import os
import os.path
import pytest
from psyrun.core import load_infile, load_results, save_outfile
from psyrun.pspace import Param
from psyrun.split import Splitter
@pytest.mark.parametrize(
'pspace_size,max_splits,min_items,n_splits', [
(7, 4, 4, 2), (8, 4, 4, 2), (9, 4, 4, 3),
(15, 4, 4, 4), (16, 4, 4, 4), (17, 4, 4, 4),
(15, 2, 4, 2), (16, 4, 16, 1)
])
class TestSplitter(object):
def test_n_splits(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
assert splitter.n_splits == n_splits
assert len(list(splitter.iter_in_out_files())) == n_splits
def test_split_merge_roundtrip(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
splitter.split()
for filename in os.listdir(splitter.indir):
infile = os.path.join(splitter.indir, filename)
outfile = os.path.join(splitter.outdir, filename)
save_outfile(load_infile(infile), outfile)
result_file = os.path.join(str(tmpdir), 'result.h5')
Splitter.merge(str(tmpdir), result_file)
result = load_results(result_file)
assert sorted(result['x']) == sorted(range(pspace_size))
|
Add unit test for Splitter.import os
import os.path
import pytest
from psyrun.core import load_infile, load_results, save_outfile
from psyrun.pspace import Param
from psyrun.split import Splitter
@pytest.mark.parametrize(
'pspace_size,max_splits,min_items,n_splits', [
(7, 4, 4, 2), (8, 4, 4, 2), (9, 4, 4, 3),
(15, 4, 4, 4), (16, 4, 4, 4), (17, 4, 4, 4),
(15, 2, 4, 2), (16, 4, 16, 1)
])
class TestSplitter(object):
def test_n_splits(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
assert splitter.n_splits == n_splits
assert len(list(splitter.iter_in_out_files())) == n_splits
def test_split_merge_roundtrip(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
splitter.split()
for filename in os.listdir(splitter.indir):
infile = os.path.join(splitter.indir, filename)
outfile = os.path.join(splitter.outdir, filename)
save_outfile(load_infile(infile), outfile)
result_file = os.path.join(str(tmpdir), 'result.h5')
Splitter.merge(str(tmpdir), result_file)
result = load_results(result_file)
assert sorted(result['x']) == sorted(range(pspace_size))
|
<commit_before><commit_msg>Add unit test for Splitter.<commit_after>import os
import os.path
import pytest
from psyrun.core import load_infile, load_results, save_outfile
from psyrun.pspace import Param
from psyrun.split import Splitter
@pytest.mark.parametrize(
'pspace_size,max_splits,min_items,n_splits', [
(7, 4, 4, 2), (8, 4, 4, 2), (9, 4, 4, 3),
(15, 4, 4, 4), (16, 4, 4, 4), (17, 4, 4, 4),
(15, 2, 4, 2), (16, 4, 16, 1)
])
class TestSplitter(object):
def test_n_splits(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
assert splitter.n_splits == n_splits
assert len(list(splitter.iter_in_out_files())) == n_splits
def test_split_merge_roundtrip(
self, tmpdir, pspace_size, max_splits, min_items, n_splits):
splitter = Splitter(
str(tmpdir), Param(x=range(pspace_size)), max_splits, min_items)
splitter.split()
for filename in os.listdir(splitter.indir):
infile = os.path.join(splitter.indir, filename)
outfile = os.path.join(splitter.outdir, filename)
save_outfile(load_infile(infile), outfile)
result_file = os.path.join(str(tmpdir), 'result.h5')
Splitter.merge(str(tmpdir), result_file)
result = load_results(result_file)
assert sorted(result['x']) == sorted(range(pspace_size))
|
|
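One pytest detail worth noting in the test above: `@pytest.mark.parametrize` applied at class level runs every test method in the class once per parameter tuple, so `test_n_splits` and `test_split_merge_roundtrip` each execute for all eight `(pspace_size, max_splits, min_items, n_splits)` combinations. A self-contained illustration of the same mechanism:

```python
import pytest

@pytest.mark.parametrize('value,doubled', [(1, 2), (3, 6)])
class TestDoubling:
    # Each method runs once per (value, doubled) tuple: four tests total.
    def test_multiply(self, value, doubled):
        assert value * 2 == doubled

    def test_halve(self, value, doubled):
        assert doubled // 2 == value
```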
ce483720321097d982888f6dcb673b9348daad4e
|
doc/book/book-dist.py
|
doc/book/book-dist.py
|
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
Add a little package-em-up script for the Subversion book.
|
Add a little package-em-up script for the Subversion book.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@849264 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion
|
Add a little package-em-up script for the Subversion book.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@849264 13f79535-47bb-0310-9956-ffa450edef68
|
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
<commit_before><commit_msg>Add a little package-em-up script for the Subversion book.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@849264 13f79535-47bb-0310-9956-ffa450edef68<commit_after>
|
#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
Add a little package-em-up script for the Subversion book.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@849264 13f79535-47bb-0310-9956-ffa450edef68#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
<commit_before><commit_msg>Add a little package-em-up script for the Subversion book.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@849264 13f79535-47bb-0310-9956-ffa450edef68<commit_after>#!/usr/bin/env python2
import sys
import os
import shutil
def die(msg):
sys.stderr.write('ERROR: ' + msg)
sys.exit(1)
cwd = os.getcwd()
if not os.path.exists('book') \
or not os.path.exists('Makefile'):
die('Please run this from the Subversion book source directory\n')
if not os.getenv('JAVA_HOME'):
die('JAVA_HOME is not set correctly.\n')
if os.path.exists('./usr'):
die('Please remove ./usr.\n')
os.putenv('FOP_OPTS', '-Xms100m -Xmx200m')
os.system('DESTDIR=. make book-clean install-book-html ' + \
'install-book-html-chunk install-book-pdf')
tarball = os.path.join(cwd, 'svnbook.tar.gz')
try:
os.chdir('./usr/share/doc/subversion')
os.rename('book', 'svnbook')
os.system('tar cvfz ' + tarball + ' svnbook')
finally:
os.chdir(cwd)
shutil.rmtree('./usr')
if not os.path.exists(tarball):
die('Hrm. It appears the tarball was not created.\n')
print 'Your tarball sits in ./svnbook.tar.gz. Enjoy!'
|
|
21080583ce8f5b1ec7d3f45c21f4781f539fa2dd
|
gtkmvco/examples/observable/value.py
|
gtkmvco/examples/observable/value.py
|
# Author: Roberto Cavada <cavada@fbk.eu>
#
# Copyright (c) 2006 by Roberto Cavada
#
# pygtkmvc is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# pygtkmvc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110, USA.
#
# For more information on pygtkmvc see <http://pygtkmvc.sourceforge.net>
# or email to the author Roberto Cavada <cavada@fbk.eu>.
# Please report bugs to <cavada@fbk.eu>.
# ----------------------------------------------------------------------
# In this example the use of observable properties is shown.
# The example does not need a view and a controller, as only
# the model side (and an observer) is used.
# ----------------------------------------------------------------------
import _importer
from gtkmvc import Model
from gtkmvc import Observer
# ----------------------------------------------------------------------
class MyModel (Model):
internal = 0
# external here is a property that is not stored internally, but
# handled by a pair of methods (a getter and a setter)
__observables__ = ["internal", "external"]
def get_external_value(self): return "some value for external"
def set_external_value(self, val):
print "setter for external was called"
return
pass
# ----------------------------------------------------------------------
class MyObserver (Observer):
"""Since version 1.0.0, base class 'Observer' is provided to
create observers that are not necessarily derived from Controller"""
# notifications
def property_internal_value_change(self, model, old, new):
print "internal changed!"
return
def property_external_value_change(self, model, old, new):
print "external changed!"
return
pass
# Look at what happens to the observer
if __name__ == "__main__":
m = MyModel()
c = MyObserver(m)
m.internal = 20
m.external = "a new value"
pass
|
TEST New test/example for the new feature supporting custom getter/setter for properties
|
TEST
New test/example for the new feature supporting custom getter/setter for
properties
[RC]
|
Python
|
lgpl-2.1
|
roboogle/gtkmvc3,roboogle/gtkmvc3
|
TEST
New test/example for the new feature supporting custom getter/setter for
properties
[RC]
|
# Author: Roberto Cavada <cavada@fbk.eu>
#
# Copyright (c) 2006 by Roberto Cavada
#
# pygtkmvc is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# pygtkmvc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110, USA.
#
# For more information on pygtkmvc see <http://pygtkmvc.sourceforge.net>
# or email to the author Roberto Cavada <cavada@fbk.eu>.
# Please report bugs to <cavada@fbk.eu>.
# ----------------------------------------------------------------------
# In this example the use of observable properties is shown.
# The example does not need a view and a controller, as only
# the model side (and an observer) is used.
# ----------------------------------------------------------------------
import _importer
from gtkmvc import Model
from gtkmvc import Observer
# ----------------------------------------------------------------------
class MyModel (Model):
internal = 0
# external here is a property that is not stored internally, but
# handled by a pair of methods (a getter and a setter)
__observables__ = ["internal", "external"]
def get_external_value(self): return "some value for external"
def set_external_value(self, val):
print "setter for external was called"
return
pass
# ----------------------------------------------------------------------
class MyObserver (Observer):
"""Since version 1.0.0, base class 'Observer' is provided to
create observers that are not necessarily derived from Controller"""
# notifications
def property_internal_value_change(self, model, old, new):
print "internal changed!"
return
def property_external_value_change(self, model, old, new):
print "external changed!"
return
pass
# Look at what happens to the observer
if __name__ == "__main__":
m = MyModel()
c = MyObserver(m)
m.internal = 20
m.external = "a new value"
pass
|
<commit_before><commit_msg>TEST
New test/example for the new feature supporting custom getter/setter for
properties
[RC]<commit_after>
|
# Author: Roberto Cavada <cavada@fbk.eu>
#
# Copyright (c) 2006 by Roberto Cavada
#
# pygtkmvc is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# pygtkmvc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110, USA.
#
# For more information on pygtkmvc see <http://pygtkmvc.sourceforge.net>
# or email to the author Roberto Cavada <cavada@fbk.eu>.
# Please report bugs to <cavada@fbk.eu>.
# ----------------------------------------------------------------------
# In this example the use of observable properties is shown.
# The example does not need a view and a controller, as only
# the model side (and an observer) is used.
# ----------------------------------------------------------------------
import _importer
from gtkmvc import Model
from gtkmvc import Observer
# ----------------------------------------------------------------------
class MyModel (Model):
internal = 0
# external here is a property that is not stored internally, but
# handled by a pair of methods (a getter and a setter)
__observables__ = ["internal", "external"]
def get_external_value(self): return "some value for external"
def set_external_value(self, val):
print "setter for external was called"
return
pass
# ----------------------------------------------------------------------
class MyObserver (Observer):
"""Since version 1.0.0, base class 'Observer' is provided to
create observers that are not necessarily derived from Controller"""
# notifications
def property_internal_value_change(self, model, old, new):
print "internal changed!"
return
def property_external_value_change(self, model, old, new):
print "external changed!"
return
pass
# Look at what happens to the observer
if __name__ == "__main__":
m = MyModel()
c = MyObserver(m)
m.internal = 20
m.external = "a new value"
pass
|
TEST
New test/example for the new feature supporting custom getter/setter for
properties
[RC]# Author: Roberto Cavada <cavada@fbk.eu>
#
# Copyright (c) 2006 by Roberto Cavada
#
# pygtkmvc is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# pygtkmvc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110, USA.
#
# For more information on pygtkmvc see <http://pygtkmvc.sourceforge.net>
# or email to the author Roberto Cavada <cavada@fbk.eu>.
# Please report bugs to <cavada@fbk.eu>.
# ----------------------------------------------------------------------
# In this example the use of observable properties is shown.
# The example does not need a view and a controller, as only
# the model side (and an observer) is used.
# ----------------------------------------------------------------------
import _importer
from gtkmvc import Model
from gtkmvc import Observer
# ----------------------------------------------------------------------
class MyModel (Model):
internal = 0
# external here is a property that is not stored internally, but
# handled by a pair of methods (a getter and a setter)
__observables__ = ["internal", "external"]
def get_external_value(self): return "some value for external"
def set_external_value(self, val):
print "setter for external was called"
return
pass
# ----------------------------------------------------------------------
class MyObserver (Observer):
"""Since version 1.0.0, base class 'Observer' is provided to
create observers that are not necessarily derived from Controller"""
# notifications
def property_internal_value_change(self, model, old, new):
print "internal changed!"
return
def property_external_value_change(self, model, old, new):
print "external changed!"
return
pass
# Look at what happens to the observer
if __name__ == "__main__":
m = MyModel()
c = MyObserver(m)
m.internal = 20
m.external = "a new value"
pass
|
<commit_before><commit_msg>TEST
New test/example for the new feature supporting custom getter/setter for
properties
[RC]<commit_after># Author: Roberto Cavada <cavada@fbk.eu>
#
# Copyright (c) 2006 by Roberto Cavada
#
# pygtkmvc is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# pygtkmvc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110, USA.
#
# For more information on pygtkmvc see <http://pygtkmvc.sourceforge.net>
# or email to the author Roberto Cavada <cavada@fbk.eu>.
# Please report bugs to <cavada@fbk.eu>.
# ----------------------------------------------------------------------
# In this example the use of observable properties is shown.
# The example does not need a view and a controller, as only
# the model side (and an observer) is used.
# ----------------------------------------------------------------------
import _importer
from gtkmvc import Model
from gtkmvc import Observer
# ----------------------------------------------------------------------
class MyModel (Model):
internal = 0
# external here is a property that is not stored internally, but
# handled by a pair of methods (a getter and a setter)
__observables__ = ["internal", "external"]
def get_external_value(self): return "some value for external"
def set_external_value(self, val):
print "setter for external was called"
return
pass
# ----------------------------------------------------------------------
class MyObserver (Observer):
"""Since version 1.0.0, base class 'Observer' is provided to
create observers that are not necessarily derived from Controller"""
# notifications
def property_internal_value_change(self, model, old, new):
print "internal changed!"
return
def property_external_value_change(self, model, old, new):
print "external changed!"
return
pass
# Look at what happens to the observer
if __name__ == "__main__":
m = MyModel()
c = MyObserver(m)
m.internal = 20
m.external = "a new value"
pass
|
|
6d54528c74a5e0074bbddaabb7db803b655d06c3
|
array/merge-sort.py
|
array/merge-sort.py
|
# python implementation of merge sort
def merge_sort(arr):
if len(arr) > 1:
middle = len(arr)/2
left = arr[:middle]
right = arr[middle:]
merge_sort(left)
merge_sort(right)
i = 0
j = 0
k = 0
while i < len(left) and j < len(right):
if left[i] < right[j]:
arr[k] = left[i]
i = i + 1
else:
arr[k] = right[j]
j = j + 1
k = k + 1
while i < len(left):
arr[k] = left[i]
i = i + 1
k = k + 1
while j < len(right):
arr[k] = right[j]
j = j + 1
k = k + 1
return arr
|
Add python implementation for merge sort method
|
Add python implementation for merge sort method
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
Add python implementation for merge sort method
|
# python implementation of merge sort
def merge_sort(arr):
if len(arr) > 1:
middle = len(arr)/2
left = arr[:middle]
right = arr[middle:]
merge_sort(left)
merge_sort(right)
i = 0
j = 0
k = 0
while i < len(left) and j < len(right):
if left[i] < right[j]:
arr[k] = left[i]
i = i + 1
else:
arr[k] = right[j]
j = j + 1
k = k + 1
while i < len(left):
arr[k] = left[i]
i = i + 1
k = k + 1
while j < len(right):
arr[k] = right[j]
j = j + 1
k = k + 1
return arr
|
<commit_before><commit_msg>Add python implementation for merge sort method<commit_after>
|
# python implementation of merge sort
def merge_sort(arr):
if len(arr) > 1:
middle = len(arr)/2
left = arr[:middle]
right = arr[middle:]
merge_sort(left)
merge_sort(right)
i = 0
j = 0
k = 0
while i < len(left) and j < len(right):
if left[i] < right[j]:
arr[k] = left[i]
i = i + 1
else:
arr[k] = right[j]
j = j + 1
k = k + 1
while i < len(left):
arr[k] = left[i]
i = i + 1
k = k + 1
while j < len(right):
arr[k] = right[j]
j = j + 1
k = k + 1
return arr
|
Add python implementation for merge sort method# python implementation of merge sort
def merge_sort(arr):
if len(arr) > 1:
middle = len(arr)/2
left = arr[:middle]
right = arr[middle:]
merge_sort(left)
merge_sort(right)
i = 0
j = 0
k = 0
while i < len(left) and j < len(right):
if left[i] < right[j]:
arr[k] = left[i]
i = i + 1
else:
arr[k] = right[j]
j = j + 1
k = k + 1
while i < len(left):
arr[k] = left[i]
i = i + 1
k = k + 1
while j < len(right):
arr[k] = right[j]
j = j + 1
k = k + 1
return arr
|
<commit_before><commit_msg>Add python implementation for merge sort method<commit_after># python implementation of merge sort
def merge_sort(arr):
if len(arr) > 1:
middle = len(arr)/2
left = arr[:middle]
right = arr[middle:]
merge_sort(left)
merge_sort(right)
i = 0
j = 0
k = 0
while i < len(left) and j < len(right):
if left[i] < right[j]:
arr[k] = left[i]
i = i + 1
else:
arr[k] = right[j]
j = j + 1
k = k + 1
while i < len(left):
arr[k] = left[i]
i = i + 1
k = k + 1
while j < len(right):
arr[k] = right[j]
j = j + 1
k = k + 1
return arr
|
|
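One portability note on the sort above: it is Python 2 code. `middle = len(arr)/2` floor-divides only because both operands are ints under Python 2; under Python 3 the expression yields a float and the slices raise `TypeError`, so the line would become `middle = len(arr) // 2`. With that single change assumed, a quick sanity check of the function:

```python
# Assumes merge_sort as defined above, with the Python 3 `//` adjustment.
assert merge_sort([5, 2, 9, 1, 7]) == [1, 2, 5, 7, 9]
assert merge_sort([]) == []                 # empty input returned unchanged
assert merge_sort([3, 3, 1]) == [1, 3, 3]   # duplicates preserved
```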
896286d2e031c621c0a93e30dac152b606453c4c
|
celery/run_carrizo.py
|
celery/run_carrizo.py
|
import dem
import tasks
from celery import *
import numpy as np
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
Add test script using Carrizo data
|
Add test script using Carrizo data
|
Python
|
mit
|
rmsare/scarplet,stgl/scarplet
|
Add test script using Carrizo data
|
import dem
import tasks
from celery import *
import numpy as np
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
<commit_before><commit_msg>Add test script using Carrizo data<commit_after>
|
import dem
import tasks
from celery import *
import numpy as np
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
Add test script using Carrizo dataimport dem
import tasks
from celery import *
import numpy as np
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
<commit_before><commit_msg>Add test script using Carrizo data<commit_after>import dem
import tasks
from celery import *
import numpy as np
carrizo = dem.DEMGrid('tests/data/carrizo.tif')
d = 100
max_age = 10**3.5
age_step = 1
num_ages = max_age/age_step
num_angles = 180
ages = np.linspace(0, max_age, num=num_ages)
angles = np.linspace(-np.pi/2, np.pi/2, num=num_angles)
template_fits = [tasks.match_template.s(carrizo, d, age, alpha) for age in ages for alpha in angles]
compare_callback = tasks.compare_fits.s()
res = chord(template_fits)(compare_callback)
|
|
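For readers unfamiliar with the Celery primitive on the last line: `chord(header)(callback)` schedules every signature in the header group to run in parallel and, once all of them finish, invokes the callback with the list of their results; here that is one template fit per (age, angle) pair, handed to `compare_fits`. A minimal standalone sketch (broker URL and task bodies are placeholders):

```python
from celery import Celery, chord

app = Celery('demo', broker='redis://localhost:6379/0')  # placeholder broker

@app.task
def square(x):
    return x * x

@app.task
def pick_max(results):
    # Invoked once, with the list of all header-task results.
    return max(results)

# Runs the ten square() tasks in parallel, then pick_max on their results.
async_result = chord(square.s(i) for i in range(10))(pick_max.s())
```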
108b7f7943a8338758ae55063d9eb21440c6297e
|
tests/utils_test.py
|
tests/utils_test.py
|
from utils import reverse_complement, gc_content, distance
def test_reverse_complement():
assert reverse_complement('ATCG') == 'CGAT'
assert reverse_complement('AAAA') == 'TTTT'
assert reverse_complement('GACT') == 'AGTC'
def test_gc_content():
assert gc_content('ACGT') == 0.5
assert gc_content('ATAT') == 0
assert gc_content('GCGC') == 1
# GC Content in a sequence should be the same in its reverse complement
s = 'ACGATACGAGCCATT'
assert gc_content(s) == gc_content(reverse_complement(s))
def test_distance():
assert distance('GCAT', 'GCAT') == 0
assert distance('GCAT', 'ATGA') == 4
assert distance('GCAT', 'GACT') == 2
|
Add tests for utility methods
|
Add tests for utility methods
|
Python
|
mit
|
MichaelAquilina/rosalind-solutions
|
Add tests for utility methods
|
from utils import reverse_complement, gc_content, distance
def test_reverse_complement():
assert reverse_complement('ATCG') == 'CGAT'
assert reverse_complement('AAAA') == 'TTTT'
assert reverse_complement('GACT') == 'AGTC'
def test_gc_content():
assert gc_content('ACGT') == 0.5
assert gc_content('ATAT') == 0
assert gc_content('GCGC') == 1
# GC Content in a sequence should be the same in its reverse complement
s = 'ACGATACGAGCCATT'
assert gc_content(s) == gc_content(reverse_complement(s))
def test_distance():
assert distance('GCAT', 'GCAT') == 0
assert distance('GCAT', 'ATGA') == 4
assert distance('GCAT', 'GACT') == 2
|
<commit_before><commit_msg>Add tests for utility methods<commit_after>
|
from utils import reverse_complement, gc_content, distance
def test_reverse_complement():
assert reverse_complement('ATCG') == 'CGAT'
assert reverse_complement('AAAA') == 'TTTT'
assert reverse_complement('GACT') == 'AGTC'
def test_gc_content():
assert gc_content('ACGT') == 0.5
assert gc_content('ATAT') == 0
assert gc_content('GCGC') == 1
# GC Content in a sequence should be the same in its reverse complement
s = 'ACGATACGAGCCATT'
assert gc_content(s) == gc_content(reverse_complement(s))
def test_distance():
assert distance('GCAT', 'GCAT') == 0
assert distance('GCAT', 'ATGA') == 4
assert distance('GCAT', 'GACT') == 2
|
Add tests for utility methodsfrom utils import reverse_complement, gc_content, distance
def test_reverse_complement():
assert reverse_complement('ATCG') == 'CGAT'
assert reverse_complement('AAAA') == 'TTTT'
assert reverse_complement('GACT') == 'AGTC'
def test_gc_content():
assert gc_content('ACGT') == 0.5
assert gc_content('ATAT') == 0
assert gc_content('GCGC') == 1
# GC Content in a sequence should be the same in its reverse complement
s = 'ACGATACGAGCCATT'
assert gc_content(s) == gc_content(reverse_complement(s))
def test_distance():
assert distance('GCAT', 'GCAT') == 0
assert distance('GCAT', 'ATGA') == 4
assert distance('GCAT', 'GACT') == 2
|
<commit_before><commit_msg>Add tests for utility methods<commit_after>from utils import reverse_complement, gc_content, distance
def test_reverse_complement():
assert reverse_complement('ATCG') == 'CGAT'
assert reverse_complement('AAAA') == 'TTTT'
assert reverse_complement('GACT') == 'AGTC'
def test_gc_content():
assert gc_content('ACGT') == 0.5
assert gc_content('ATAT') == 0
assert gc_content('GCGC') == 1
# GC Content in a sequence should be the same in its reverse complement
s = 'ACGATACGAGCCATT'
assert gc_content(s) == gc_content(reverse_complement(s))
def test_distance():
assert distance('GCAT', 'GCAT') == 0
assert distance('GCAT', 'ATGA') == 4
assert distance('GCAT', 'GACT') == 2
|
|
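The `utils` module exercised above is not included in this record; the following is a minimal implementation consistent with every assertion in the test, written as an assumption rather than the project's actual code (`distance` behaves as a Hamming distance over equal-length strings):

```python
def reverse_complement(seq):
    """Complement each base and reverse: reverse_complement('ATCG') == 'CGAT'."""
    complement = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
    return ''.join(complement[base] for base in reversed(seq))

def gc_content(seq):
    """Fraction of bases that are G or C, e.g. gc_content('ACGT') == 0.5."""
    return float(seq.count('G') + seq.count('C')) / len(seq)

def distance(a, b):
    """Hamming distance: count of positions where a and b differ."""
    return sum(1 for x, y in zip(a, b) if x != y)
```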
5737213ab2bc2ed041c71dd4e6d698ff00820038
|
source/harmony/schema/processor.py
|
source/harmony/schema/processor.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from abc import ABCMeta, abstractmethod
class Processor(object):
'''Process schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def process(self, schemas):
'''Process *schemas*
:py:class:`collection <harmony.schema.collection.Collection>`.
Return a new :py:class:`~harmony.schema.collection.Collection` of
*schemas* after processing.
'''
|
Add Processor interface for post processing of schemas.
|
Add Processor interface for post processing of schemas.
|
Python
|
apache-2.0
|
4degrees/harmony
|
Add Processor interface for post processing of schemas.
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from abc import ABCMeta, abstractmethod
class Processor(object):
'''Process schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def process(self, schemas):
'''Process *schemas*
:py:class:`collection <harmony.schema.collection.Collection>`.
Return a new :py:class:`~harmony.schema.collection.Collection` of
*schemas* after processing.
'''
|
<commit_before><commit_msg>Add Processor interface for post processing of schemas.<commit_after>
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from abc import ABCMeta, abstractmethod
class Processor(object):
'''Process schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def process(self, schemas):
'''Process *schemas*
:py:class:`collection <harmony.schema.collection.Collection>`.
Return a new :py:class:`~harmony.schema.collection.Collection` of
*schemas* after processing.
'''
|
Add Processor interface for post processing of schemas.# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from abc import ABCMeta, abstractmethod
class Processor(object):
'''Process schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def process(self, schemas):
'''Process *schemas*
:py:class:`collection <harmony.schema.collection.Collection>`.
Return a new :py:class:`~harmony.schema.collection.Collection` of
*schemas* after processing.
'''
|
<commit_before><commit_msg>Add Processor interface for post processing of schemas.<commit_after># :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from abc import ABCMeta, abstractmethod
class Processor(object):
'''Process schemas.'''
__metaclass__ = ABCMeta
@abstractmethod
def process(self, schemas):
'''Process *schemas*
:py:class:`collection <harmony.schema.collection.Collection>`.
Return a new :py:class:`~harmony.schema.collection.Collection` of
*schemas* after processing.
'''
|
|
4e3e14e5d916c229876fb2faae03082ed65d0918
|
test/test_sparql_base_ref.py
|
test/test_sparql_base_ref.py
|
from rdflib import ConjunctiveGraph, Literal
from StringIO import StringIO
import unittest
test_data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<http://example.org/alice> a foaf:Person;
foaf:name "Alice";
foaf:knows <http://example.org/bob> ."""
test_query = """
BASE <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { [ a :Person; :name ?name ]. }
}"""
class TestSparqlJsonResults(unittest.TestCase):
def setUp(self):
self.graph = ConjunctiveGraph()
self.graph.parse(StringIO(test_data), format="n3")
def test_base_ref(self):
rt=self.graph.query(test_query).serialize("python")
self.failUnless(rt[0] == Literal("Alice"),"Expected:\n 'Alice' \nGot:\n %s" % rt)
if __name__ == "__main__":
unittest.main()
|
Test for use of BASE <..>
|
Test for use of BASE <..>
|
Python
|
bsd-3-clause
|
Letractively/rdflib,Letractively/rdflib,Letractively/rdflib
|
Test for use of BASE <..>
|
from rdflib import ConjunctiveGraph, Literal
from StringIO import StringIO
import unittest
test_data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<http://example.org/alice> a foaf:Person;
foaf:name "Alice";
foaf:knows <http://example.org/bob> ."""
test_query = """
BASE <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { [ a :Person; :name ?name ]. }
}"""
class TestSparqlJsonResults(unittest.TestCase):
def setUp(self):
self.graph = ConjunctiveGraph()
self.graph.parse(StringIO(test_data), format="n3")
def test_base_ref(self):
rt=self.graph.query(test_query).serialize("python")
self.failUnless(rt[0] == Literal("Alice"),"Expected:\n 'Alice' \nGot:\n %s" % rt)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Test for use of BASE <..><commit_after>
|
from rdflib import ConjunctiveGraph, Literal
from StringIO import StringIO
import unittest
test_data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<http://example.org/alice> a foaf:Person;
foaf:name "Alice";
foaf:knows <http://example.org/bob> ."""
test_query = """
BASE <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { [ a :Person; :name ?name ]. }
}"""
class TestSparqlJsonResults(unittest.TestCase):
def setUp(self):
self.graph = ConjunctiveGraph()
self.graph.parse(StringIO(test_data), format="n3")
def test_base_ref(self):
rt=self.graph.query(test_query).serialize("python")
self.failUnless(rt[0] == Literal("Alice"),"Expected:\n 'Alice' \nGot:\n %s" % rt)
if __name__ == "__main__":
unittest.main()
|
Test for use of BASE <..>from rdflib import ConjunctiveGraph, Literal
from StringIO import StringIO
import unittest
test_data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<http://example.org/alice> a foaf:Person;
foaf:name "Alice";
foaf:knows <http://example.org/bob> ."""
test_query = """
BASE <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { [ a :Person; :name ?name ]. }
}"""
class TestSparqlJsonResults(unittest.TestCase):
def setUp(self):
self.graph = ConjunctiveGraph()
self.graph.parse(StringIO(test_data), format="n3")
def test_base_ref(self):
rt=self.graph.query(test_query).serialize("python")
self.failUnless(rt[0] == Literal("Alice"),"Expected:\n 'Alice' \nGot:\n %s" % rt)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Test for use of BASE <..><commit_after>from rdflib import ConjunctiveGraph, Literal
from StringIO import StringIO
import unittest
test_data = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<http://example.org/alice> a foaf:Person;
foaf:name "Alice";
foaf:knows <http://example.org/bob> ."""
test_query = """
BASE <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { [ a :Person; :name ?name ]. }
}"""
class TestSparqlJsonResults(unittest.TestCase):
def setUp(self):
self.graph = ConjunctiveGraph()
self.graph.parse(StringIO(test_data), format="n3")
def test_base_ref(self):
rt=self.graph.query(test_query).serialize("python")
self.failUnless(rt[0] == Literal("Alice"),"Expected:\n 'Alice' \nGot:\n %s" % rt)
if __name__ == "__main__":
unittest.main()
|
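A note on what this record probes: in standard SPARQL, BASE resolves relative IRIs written in angle brackets, while an empty prefix such as :Person is normally bound with a PREFIX declaration. The conventional form of the query under test would therefore be the sketch below; the commit checks that rdflib's parser of the time also honoured the BASE spelling.
# Standard-SPARQL equivalent, for comparison only:
standard_query = """
PREFIX : <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { [ a :Person; :name ?name ]. }
"""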
|
45cd0fa6cfaff7d82cedde85a90a20f64bb13a30
|
PRESUBMIT.py
|
PRESUBMIT.py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_EXCLUDED_PATHS = (
)
def _CommonChecks(input_api, output_api):
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
|
Add a basic presubmit script.
|
Add a basic presubmit script.
|
Python
|
bsd-3-clause
|
sahiljain/catapult,scottmcmaster/catapult,zeptonaut/catapult,SummerLW/Perf-Insight-Report,scottmcmaster/catapult,danbeam/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,scottmcmaster/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,modulexcite/catapult,catapult-project/catapult-csm,dstockwell/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,dstockwell/catapult,zeptonaut/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,dstockwell/catapult,sahiljain/catapult,benschmaus/catapult,0x90sled/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,zeptonaut/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,dstockwell/catapult,danbeam/catapult,catapult-project/catapult,danbeam/catapult,0x90sled/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,vmpstr/trace-viewer,vmpstr/trace-viewer,sahiljain/catapult,catapult-project/catapult,0x90sled/catapult,catapult-project/catapult,sahiljain/catapult,sahiljain/catapult,benschmaus/catapult,modulexcite/catapult,catapult-project/catapult-csm,sahiljain/catapult,vmpstr/trace-viewer,modulexcite/catapult,danbeam/catapult,catapult-project/catapult-csm
|
Add a basic presubmit script.
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_EXCLUDED_PATHS = (
)
def _CommonChecks(input_api, output_api):
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
|
<commit_before><commit_msg>Add a basic presubmit script.<commit_after>
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_EXCLUDED_PATHS = (
)
def _CommonChecks(input_api, output_api):
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
|
Add a basic presubmit script.# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_EXCLUDED_PATHS = (
)
def _CommonChecks(input_api, output_api):
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
|
<commit_before><commit_msg>Add a basic presubmit script.<commit_after># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_EXCLUDED_PATHS = (
)
def _CommonChecks(input_api, output_api):
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
|
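_EXCLUDED_PATHS is empty in this commit; depot_tools conventionally treats each entry as a regular-expression string matched against affected file paths. A hedged sketch of how it might be populated later (the patterns are illustrative, and the regex convention should be verified against the depot_tools version in use):
_EXCLUDED_PATHS = (
    r'.*third_party[\\/].*',   # skip vendored code
    r'.*\.min\.js$',           # skip minified bundles
)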
|
e9720a993332f4d08a76f49828ad87378c58abd2
|
tests/unit/cli/test_utils.py
|
tests/unit/cli/test_utils.py
|
from awscfncli2.cli.utils.colormaps import STACK_STATUS_TO_COLOR
def test_stack_status_to_color():
assert STACK_STATUS_TO_COLOR['UPDATE_COMPLETE'] == {'fg': 'green'}
assert STACK_STATUS_TO_COLOR['CREATE_FAILED'] == {'fg': 'red'}
assert STACK_STATUS_TO_COLOR['IMPORT_IN_PROGRESS'] == {'fg': 'yellow'}
# invalid status should return an empty dict instead of KeyError
assert STACK_STATUS_TO_COLOR['FOOBAR'] == {}
|
Add a test case for colormap behavior change.
|
Add a test case for colormap behavior change.
|
Python
|
mit
|
Kotaimen/awscfncli,Kotaimen/awscfncli
|
Add a test case for colormap behavior change.
|
from awscfncli2.cli.utils.colormaps import STACK_STATUS_TO_COLOR
def test_stack_status_to_color():
assert STACK_STATUS_TO_COLOR['UPDATE_COMPLETE'] == {'fg': 'green'}
assert STACK_STATUS_TO_COLOR['CREATE_FAILED'] == {'fg': 'red'}
assert STACK_STATUS_TO_COLOR['IMPORT_IN_PROGRESS'] == {'fg': 'yellow'}
# invalid status should return an empty dict instead of KeyError
assert STACK_STATUS_TO_COLOR['FOOBAR'] == {}
|
<commit_before><commit_msg>Add a test case for colormap behavior change.<commit_after>
|
from awscfncli2.cli.utils.colormaps import STACK_STATUS_TO_COLOR
def test_stack_status_to_color():
assert STACK_STATUS_TO_COLOR['UPDATE_COMPLETE'] == {'fg': 'green'}
assert STACK_STATUS_TO_COLOR['CREATE_FAILED'] == {'fg': 'red'}
assert STACK_STATUS_TO_COLOR['IMPORT_IN_PROGRESS'] == {'fg': 'yellow'}
# invalid status should return an empty dict instead of KeyError
assert STACK_STATUS_TO_COLOR['FOOBAR'] == {}
|
Add a test case for colormap behavior change.from awscfncli2.cli.utils.colormaps import STACK_STATUS_TO_COLOR
def test_stack_status_to_color():
assert STACK_STATUS_TO_COLOR['UPDATE_COMPLETE'] == {'fg': 'green'}
assert STACK_STATUS_TO_COLOR['CREATE_FAILED'] == {'fg': 'red'}
assert STACK_STATUS_TO_COLOR['IMPORT_IN_PROGRESS'] == {'fg': 'yellow'}
# invalid status should return an empty dict instead of KeyError
assert STACK_STATUS_TO_COLOR['FOOBAR'] == {}
|
<commit_before><commit_msg>Add a test case for colormap behavior change.<commit_after>from awscfncli2.cli.utils.colormaps import STACK_STATUS_TO_COLOR
def test_stack_status_to_color():
assert STACK_STATUS_TO_COLOR['UPDATE_COMPLETE'] == {'fg': 'green'}
assert STACK_STATUS_TO_COLOR['CREATE_FAILED'] == {'fg': 'red'}
assert STACK_STATUS_TO_COLOR['IMPORT_IN_PROGRESS'] == {'fg': 'yellow'}
# invalid status should return an empty dict instead of KeyError
assert STACK_STATUS_TO_COLOR['FOOBAR'] == {}
|
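The final assertion implies that lookups of unknown statuses must not raise. One implementation consistent with the tested behaviour, offered purely as an assumption since the record does not include colormaps.py itself, is a defaultdict:
# Hypothetical sketch of what colormaps.py might contain.
from collections import defaultdict
STACK_STATUS_TO_COLOR = defaultdict(dict, {
    'UPDATE_COMPLETE': {'fg': 'green'},
    'CREATE_FAILED': {'fg': 'red'},
    'IMPORT_IN_PROGRESS': {'fg': 'yellow'},
})
# Missing keys yield a fresh empty dict rather than a KeyError:
assert STACK_STATUS_TO_COLOR['FOOBAR'] == {}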
|
d4be0acb6a33989cdb487ac98efe79af60bb87fb
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true",
help="run slow tests")
def pytest_runtest_setup(item):
if 'slow' in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
Add pytest config file for slow tests.
|
Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.
|
Python
|
bsd-3-clause
|
achabotl/pambox
|
Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.
|
import pytest
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true",
help="run slow tests")
def pytest_runtest_setup(item):
if 'slow' in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
<commit_before><commit_msg>Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.<commit_after>
|
import pytest
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true",
help="run slow tests")
def pytest_runtest_setup(item):
if 'slow' in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.import pytest
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true",
help="run slow tests")
def pytest_runtest_setup(item):
if 'slow' in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
<commit_before><commit_msg>Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.<commit_after>import pytest
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true",
help="run slow tests")
def pytest_runtest_setup(item):
if 'slow' in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
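Usage sketch for the hooks above: a test opts in by carrying the 'slow' keyword, typically via a marker (the test name and body here are illustrative):
import pytest
@pytest.mark.slow
def test_long_running_model():
    assert sum(range(10 ** 6)) >= 0
# $ pytest              -> test_long_running_model is skipped
# $ pytest --runslow    -> test_long_running_model runs
Recent pytest versions also expect custom markers to be registered (for example in pytest.ini) to avoid warnings.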
|
e3cad9bfa134900218bb8502698f8bcf147ff474
|
tools/DICOM_constant_filter.py
|
tools/DICOM_constant_filter.py
|
# DICOM_constant_filter.py
# Strip out a list of all constant names from the list of source files in the location provided
# and output C source of an array of {char*,int}
from __future__ import print_function
import re
import os
InstallDir = os.path.join("C:\\", "lib")
DICOMBase = os.path.join(InstallDir, "DCMTK-debug", "include", "dcmtk", "dcmdata")
files = [
"dcdeftag.h"
]
for f in files:
filename = os.path.join(DICOMBase, f)
print(filename)
fh = open(filename)
for line in fh:
for tok in line.split(): # split on whitespace
if re.match("^DCM_", tok): # match DICOM constants
print('{{ "{0}", {0} }}'.format(tok))
fh.close()
|
Add a python script to filter out all DICOM constants.
|
Add a python script to filter out all DICOM constants.
|
Python
|
mit
|
jimbo00000/Rift-Volume,jimbo00000/Rift-Volume
|
Add a python script to filter out all DICOM constants.
|
# DICOM_constant_filter.py
# Strip out a list of all constant names from the list of source files in the location provided
# and output C source of an array of {char*,int}
from __future__ import print_function
import re
import os
InstallDir = os.path.join("C:\\", "lib")
DICOMBase = os.path.join(InstallDir, "DCMTK-debug", "include", "dcmtk", "dcmdata")
files = [
"dcdeftag.h"
]
for f in files:
filename = os.path.join(DICOMBase, f)
print(filename)
fh = open(filename)
for line in fh:
for tok in line.split(): # split on whitespace
if re.match("^DCM_", tok): # match DICOM constants
print('{{ "{0}", {0} }}'.format(tok))
fh.close()
|
<commit_before><commit_msg>Add a python script to filter out all DICOM constants.<commit_after>
|
# DICOM_constant_filter.py
# Strip out a list of all constant names from the list of source files in the location provided
# and output C source of an array of {char*,int}
from __future__ import print_function
import re
import os
InstallDir = os.path.join("C:\\", "lib")
DICOMBase = os.path.join(InstallDir, "DCMTK-debug", "include", "dcmtk", "dcmdata")
files = [
"dcdeftag.h"
]
for f in files:
filename = os.path.join(DICOMBase, f)
print(filename)
fh = open(filename)
for line in fh:
for tok in line.split(): # split on whitespace
if re.match("^DCM_", tok): # match DICOM constants
print('{{ "{0}", {0} }}'.format(tok))
fh.close()
|
Add a python script to filter out all DICOM constants.# DICOM_constant_filter.py
# Strip out a list of all constant names from the list of source files in the location provided
# and output C source of an array of {char*,int}
from __future__ import print_function
import re
import os
InstallDir = os.path.join("C:\\", "lib")
DICOMBase = os.path.join(InstallDir, "DCMTK-debug", "include", "dcmtk", "dcmdata")
files = [
"dcdeftag.h"
]
for f in files:
filename = os.path.join(DICOMBase, f)
print(filename)
fh = open(filename)
for line in fh:
for tok in line.split(): # split on whitespace
if re.match("^DCM_", tok): # match DICOM constants
print('{{ "{0}", {0} }}'.format(tok))
fh.close()
|
<commit_before><commit_msg>Add a python script to filter out all DICOM constants.<commit_after># DICOM_constant_filter.py
# Strip out a list of all constant names from the list of source files in the location provided
# and output C source of an array of {char*,int}
from __future__ import print_function
import re
import os
InstallDir = os.path.join("C:\\", "lib")
DICOMBase = os.path.join(InstallDir, "DCMTK-debug", "include", "dcmtk", "dcmdata")
files = [
"dcdeftag.h"
]
for f in files:
filename = os.path.join(DICOMBase, f)
print(filename)
fh = open(filename)
for line in fh:
for tok in line.split(): # split on whitespace
if re.match("^DCM_", tok): # match DICOM constants
print('{{ "{0}", {0} }}'.format(tok))
fh.close()
|
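The loop prints one C initializer per constant but no surrounding declaration or separating commas, so its output is a fragment to paste into an array by hand. A hedged variant that emits a complete, compilable array (the element type is an assumption: the header comment above says int, while DCMTK's dcdeftag.h defines the DCM_ names as tag-key constants, so check the header for the real type):
# Illustrative refinement, not part of the original script.
tokens = ['DCM_PatientName', 'DCM_StudyDate']  # normally collected from dcdeftag.h
print('static const struct { const char* name; DcmTagKey tag; } kDicomTags[] = {')
for tok in tokens:
    print('    {{ "{0}", {0} }},'.format(tok))
print('};')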
|
80c1af071c0942060a00de40f14c5ab5fec36e0d
|
problem35.py
|
problem35.py
|
#!/usr/bin/env python
"""
A solution for problem 35 from Project Euler.
https://projecteuler.net/problem=35
The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and
719, are themselves prime.
There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97.
How many circular primes are there below one million?
"""
import time
from primes import generate_list_of_primes, is_circular_prime
def problem_thirty_five(total):
count = 0
primes = generate_list_of_primes(1, total)
for num in primes:
if is_circular_prime(num):
count += 1
print "Found %s primes under %s" % (count, total)
if __name__ == "__main__":
start = time.time()
problem_thirty_five(100)
end = time.time()
print "Solution for 100 primes took %f seconds" % (end - start)
start = time.time()
problem_thirty_five(10 ** 6)
end = time.time()
print "Solution for 1 million primes took %f seconds" % (end - start)
|
Add a solution for problem 35.
|
Add a solution for problem 35.
|
Python
|
mit
|
smillet15/project-euler
|
Add a solution for problem 35.
|
#!/usr/bin/env python
"""
A solution for problem 35 from Project Euler.
https://projecteuler.net/problem=35
The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and
719, are themselves prime.
There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97.
How many circular primes are there below one million?
"""
import time
from primes import generate_list_of_primes, is_circular_prime
def problem_thirty_five(total):
count = 0
primes = generate_list_of_primes(1, total)
for num in primes:
if is_circular_prime(num):
count += 1
print "Found %s primes under %s" % (count, total)
if __name__ == "__main__":
start = time.time()
problem_thirty_five(100)
end = time.time()
print "Solution for 100 primes took %f seconds" % (end - start)
start = time.time()
problem_thirty_five(10 ** 6)
end = time.time()
print "Solution for 1 million primes took %f seconds" % (end - start)
|
<commit_before><commit_msg>Add a solution for problem 35.<commit_after>
|
#!/usr/bin/env python
"""
A solution for problem 35 from Project Euler.
https://projecteuler.net/problem=35
The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and
719, are themselves prime.
There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97.
How many circular primes are there below one million?
"""
import time
from primes import generate_list_of_primes, is_circular_prime
def problem_thirty_five(total):
count = 0
primes = generate_list_of_primes(1, total)
for num in primes:
if is_circular_prime(num):
count += 1
print "Found %s primes under %s" % (count, total)
if __name__ == "__main__":
start = time.time()
problem_thirty_five(100)
end = time.time()
print "Solution for 100 primes took %f seconds" % (end - start)
start = time.time()
problem_thirty_five(10 ** 6)
end = time.time()
print "Solution for 1 million primes took %f seconds" % (end - start)
|
Add a solution for problem 35.#!/usr/bin/env python
"""
A solution for problem 35 from Project Euler.
https://projecteuler.net/problem=35
The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and
719, are themselves prime.
There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97.
How many circular primes are there below one million?
"""
import time
from primes import generate_list_of_primes, is_circular_prime
def problem_thirty_five(total):
count = 0
primes = generate_list_of_primes(1, total)
for num in primes:
if is_circular_prime(num):
count += 1
print "Found %s primes under %s" % (count, total)
if __name__ == "__main__":
start = time.time()
problem_thirty_five(100)
end = time.time()
print "Solution for 100 primes took %f seconds" % (end - start)
start = time.time()
problem_thirty_five(10 ** 6)
end = time.time()
print "Solution for 1 million primes took %f seconds" % (end - start)
|
<commit_before><commit_msg>Add a solution for problem 35.<commit_after>#!/usr/bin/env python
"""
A solution for problem 35 from Project Euler.
https://projecteuler.net/problem=35
The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and
719, are themselves prime.
There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97.
How many circular primes are there below one million?
"""
import time
from primes import generate_list_of_primes, is_circular_prime
def problem_thirty_five(total):
count = 0
primes = generate_list_of_primes(1, total)
for num in primes:
if is_circular_prime(num):
count += 1
print "Found %s primes under %s" % (count, total)
if __name__ == "__main__":
start = time.time()
problem_thirty_five(100)
end = time.time()
print "Solution for 100 primes took %f seconds" % (end - start)
start = time.time()
problem_thirty_five(10 ** 6)
end = time.time()
print "Solution for 1 million primes took %f seconds" % (end - start)
|
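The primes helpers are not part of this record. A plausible sketch of is_circular_prime, rotating the decimal digits and testing each rotation (a trial-division is_prime stands in for whatever the real module uses):
def is_prime(n):
    # Trial division; adequate for n below one million.
    if n < 2:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += 1
    return True
def is_circular_prime(n):
    # Every rotation of the digits must itself be prime.
    s = str(n)
    return all(is_prime(int(s[i:] + s[:i])) for i in range(len(s)))
assert is_circular_prime(197) and not is_circular_prime(19)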
|
f2fa55c8d2f94bd186fc6c47b8ce00fb87c22aaf
|
tensorflow/contrib/autograph/converters/__init__.py
|
tensorflow/contrib/autograph/converters/__init__.py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
Add a few naming guidelines for the converter library.
|
Add a few naming guidelines for the converter library.
PiperOrigin-RevId: 204199604
|
Python
|
apache-2.0
|
alsrgv/tensorflow,annarev/tensorflow,sarvex/tensorflow,ppwwyyxx/tensorflow,chemelnucfin/tensorflow,jalexvig/tensorflow,ageron/tensorflow,gunan/tensorflow,gautam1858/tensorflow,seanli9jan/tensorflow,girving/tensorflow,jart/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,xzturn/tensorflow,aldian/tensorflow,jhseu/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,ageron/tensorflow,brchiu/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,dancingdan/tensorflow,aldian/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,girving/tensorflow,Intel-tensorflow/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,chemelnucfin/tensorflow,kobejean/tensorflow,adit-chandra/tensorflow,theflofly/tensorflow,gautam1858/tensorflow,manipopopo/tensorflow,brchiu/tensorflow,jbedorf/tensorflow,ageron/tensorflow,gunan/tensorflow,hfp/tensorflow-xsmm,aam-at/tensorflow,AnishShah/tensorflow,AnishShah/tensorflow,kobejean/tensorflow,alshedivat/tensorflow,davidzchen/tensorflow,adit-chandra/tensorflow,jhseu/tensorflow,kobejean/tensorflow,freedomtan/tensorflow,theflofly/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,kobejean/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,xodus7/tensorflow,alsrgv/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,asimshankar/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,gunan/tensorflow,manipopopo/tensorflow,xodus7/tensorflow,xzturn/tensorflow,karllessard/tensorflow,hfp/tensorflow-xsmm,ageron/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,jendap/tensorflow,karllessard/tensorflow,petewarden/tensorflow,DavidNorman/tensorflow,yongtang/tensorflow,gunan/tensorflow,apark263/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,xodus7/tensorflow,sarvex/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,ppwwyyxx/tensorflow,apark263/tensorflow,xzturn/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,theflofly/tensorflow,gautam1858/tensorflow,hehongliang/tensorflow,kobejean/tensorflow,chemelnucfin/tensorflow,jbedorf/tensorflow,hehongliang/tensorflow,manipopopo/tensorflow,cxxgtxy/tensorflow,seanli9jan/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,Bismarrck/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,DavidNorman/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,renyi533/tensorflow,jbedorf/tensorflow,aselle/tensorflow,annarev/tensorflow,ZhangXinNan/tensorflow,AnishShah/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,snnn/tensorflow,ghchinoy/tensorflow,jalexvig/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,ZhangXinNan/tensorflow,apark263/tensorflow,snnn/tensorflow,annarev/tensorflow,jhseu/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow,adit-chandra/tensorflow,snnn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,theflofly/tensorflow,snnn/tensorflow,ageron/tensorflow,brchiu/tensorflow,adit-chandra/tensorflow,alshedivat/tensorflow,apark263/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,annarev/tensorflow,aam-at/tensorflow,AnishShah/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,ZhangXinNan/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,ageron/tensorflow,manipopopo/tensorflow,annarev/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,sarvex/tensorflow,alshedivat/tensorflow,karllessard/tensorflow,dongjoon-hyun/tensorflow,alsrgv/tensorflow,kevin-coder/tensorflow-fork,frreiss/tensorflow-fred,gautam1858/tensorflow,manipopopo/tensorflow,karllessard/tensorflow,jart/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kevin-coder/tensorflow-fork,jalexvig/tensorflow,jart/tensorflow,tensorflow/tensorflow,aselle/tensorflow,tensorflow/tensorflow,jbedorf/tensorflow,jendap/tensorflow,ppwwyyxx/tensorflow,apark263/tensorflow,arborh/tensorflow,hfp/tensorflow-xsmm,xodus7/tensorflow,gunan/tensorflow,karllessard/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,jendap/tensorflow,kevin-coder/tensorflow-fork,ageron/tensorflow,asimshankar/tensorflow,alsrgv/tensorflow,girving/tensorflow,aselle/tensorflow,arborh/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,manipopopo/tensorflow,jhseu/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,cxxgtxy/tensorflow,aldian/tensorflow,ppwwyyxx/tensorflow,ZhangXinNan/tensorflow,chemelnucfin/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,brchiu/tensorflow,Bismarrck/tensorflow,kevin-coder/tensorflow-fork,dancingdan/tensorflow,xzturn/tensorflow,aselle/tensorflow,xodus7/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jart/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jbedorf/tensorflow,annarev/tensorflow,ZhangXinNan/tensorflow,gunan/tensorflow,xzturn/tensorflow,aselle/tensorflow,aselle/tensorflow,AnishShah/tensorflow,dongjoon-hyun/tensorflow,aam-at/tensorflow,Bismarrck/tensorflow,gunan/tensorflow,adit-chandra/tensorflow,chemelnucfin/tensorflow,seanli9jan/tensorflow,frreiss/tensorflow-fred,xzturn/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,renyi533/tensorflow,ghchinoy/tensorflow,DavidNorman/tensorflow,kobejean/tensorflow,jalexvig/tensorflow,DavidNorman/tensorflow,manipopopo/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,dancingdan/tensorflow,paolodedios/tensorflow,hehongliang/tensorflow,ghchinoy/tensorflow,alsrgv/tensorflow,renyi533/tensorflow,jalexvig/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,alshedivat/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jart/tensorflow,hfp/tensorflow-xsmm,petewarden/tensorflow,adit-chandra/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,renyi533/tensorflow,DavidNorman/tensorflow,AnishShah/tensorflow,cxxgtxy/tensorflow,dancingdan/tensorflow,xodus7/tensorflow,jhseu/tensorflow,manipopopo/tensorflow,annarev/tensorflow,yongtang/tensorflow,aam-at/tensorflow,dancingdan/tensorflow,jendap/tensorflow,tensorflow/tensorflow,petewarden/tensorflow,petewarden/tensorflow,ageron/tensorflow,theflofly/tensorflow,DavidNorman/tensorflow,aam-at/tensorflow,arborh/tensorflow,brchiu/tensorflow,annarev/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,ppwwyyxx/tensorflow,jart/tensorflow,jbedorf/tensorflow,renyi533/tensorflow,jart/tensorflow,ppwwyyxx/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,dongjoon-hyun/tensorflow,aldian/tensorflow,karllessard/tensorflow,apark263/tensorflow,Intel-Corporation/tensorflow,Bismarrck/tensorflow,ghchinoy/tensorflow,frreiss/tensorflow-fred,jendap/tensorflow,ghchinoy/tensorflow,aam-at/tensorflow,aldian/tensorflow,alshedivat/tensorflow,jart/tensorflow,snnn/tensorflow,ZhangXinNan/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,gunan/tensorflow,arborh/tensorflow,aselle/tensorflow,seanli9jan/tensorflow,jart/tensorflow,kobejean/tensorflow,arborh/tensorflow,manipopopo/tensorflow,ppwwyyxx/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,jalexvig/tensorflow,DavidNorman/tensorflow,adit-chandra/tensorflow,dancingdan/tensorflow,alshedivat/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,jendap/tensorflow,freedomtan/tensorflow,asimshankar/tensorflow,jhseu/tensorflow,xzturn/tensorflow,ageron/tensorflow,kevin-coder/tensorflow-fork,yongtang/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,seanli9jan/tensorflow,ageron/tensorflow,asimshankar/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,alshedivat/tensorflow,ZhangXinNan/tensorflow,paolodedios/tensorflow,dongjoon-hyun/tensorflow,jhseu/tensorflow,theflofly/tensorflow,brchiu/tensorflow,chemelnucfin/tensorflow,ghchinoy/tensorflow,girving/tensorflow,ghchinoy/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,apark263/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,jbedorf/tensorflow,snnn/tensorflow,kevin-coder/tensorflow-fork,girving/tensorflow,tensorflow/tensorflow-pywrap_saved_model,kevin-coder/tensorflow-fork,alshedivat/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,arborh/tensorflow,petewarden/tensorflow,dancingdan/tensorflow,dongjoon-hyun/tensorflow,petewarden/tensorflow,AnishShah/tensorflow,dongjoon-hyun/tensorflow,kobejean/tensorflow,snnn/tensorflow,arborh/tensorflow,theflofly/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,theflofly/tensorflow,kevin-coder/tensorflow-fork,DavidNorman/tensorflow,dancingdan/tensorflow,jbedorf/tensorflow,girving/tensorflow,gautam1858/tensorflow,gunan/tensorflow,apark263/tensorflow,snnn/tensorflow,seanli9jan/tensorflow,hfp/tensorflow-xsmm,hfp/tensorflow-xsmm,asimshankar/tensorflow,ZhangXinNan/tensorflow,hehongliang/tensorflow,ghchinoy/tensorflow,aam-at/tensorflow,annarev/tensorflow,davidzchen/tensorflow,theflofly/tensorflow,davidzchen/tensorflow,ppwwyyxx/tensorflow,chemelnucfin/tensorflow,girving/tensorflow,dancingdan/tensorflow,jendap/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,Intel-Corporation/tensorflow,chemelnucfin/tensorflow,cxxgtxy/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,hfp/tensorflow-xsmm,petewarden/tensorflow,Intel-tensorflow/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,xodus7/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,dongjoon-hyun/tensorflow,girving/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,hehongliang/tensorflow,adit-chandra/tensorflow,karllessard/tensorflow,xzturn/tensorflow,dongjoon-hyun/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_saved_model,brchiu/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,gunan/tensorflow,petewarden/tensorflow,aam-at/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,aselle/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,ZhangXinNan/tensorflow,sarvex/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,arborh/tensorflow,seanli9jan/tensorflow,aldian/tensorflow,paolodedios/tensorflow,alshedivat/tensorflow,Bismarrck/tensorflow,AnishShah/tensorflow,hehongliang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,aam-at/tensorflow,kevin-coder/tensorflow-fork,sarvex/tensorflow,xodus7/tensorflow,gautam1858/tensorflow,ZhangXinNan/tensorflow,aam-at/tensorflow,girving/tensorflow,Bismarrck/tensorflow,jendap/tensorflow,kevin-coder/tensorflow-fork,DavidNorman/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,arborh/tensorflow,renyi533/tensorflow,xzturn/tensorflow,jendap/tensorflow,kevin-coder/tensorflow-fork,hfp/tensorflow-xsmm,tensorflow/tensorflow-pywrap_tf_optimizer,DavidNorman/tensorflow,kobejean/tensorflow,adit-chandra/tensorflow,Intel-tensorflow/tensorflow,dongjoon-hyun/tensorflow,Intel-tensorflow/tensorflow,hfp/tensorflow-xsmm,jendap/tensorflow,frreiss/tensorflow-fred,seanli9jan/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,aldian/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,dongjoon-hyun/tensorflow,jart/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,snnn/tensorflow,girving/tensorflow,jalexvig/tensorflow,manipopopo/tensorflow,hfp/tensorflow-xsmm,sarvex/tensorflow,jalexvig/tensorflow,ZhangXinNan/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,dancingdan/tensorflow,Bismarrck/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,snnn/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,brchiu/tensorflow,xzturn/tensorflow,sarvex/tensorflow,apark263/tensorflow,yongtang/tensorflow,xodus7/tensorflow,jhseu/tensorflow,kobejean/tensorflow,apark263/tensorflow,yongtang/tensorflow,arborh/tensorflow,alsrgv/tensorflow,jalexvig/tensorflow,ageron/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alsrgv/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,arborh/tensorflow,yongtang/tensorflow,ghchinoy/tensorflow,yongtang/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,xodus7/tensorflow,kobejean/tensorflow,girving/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xodus7/tensorflow,jhseu/tensorflow,brchiu/tensorflow,alshedivat/tensorflow,AnishShah/tensorflow,arborh/tensorflow,xzturn/tensorflow,frreiss/tensorflow-fred,annarev/tensorflow,paolodedios/tensorflow,theflofly/tensorflow,jbedorf/tensorflow,frreiss/tensorflow-fred,hehongliang/tensorflow,annarev/tensorflow,snnn/tensorflow,gunan/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,aselle/tensorflow,alshedivat/tensorflow,ageron/tensorflow,frreiss/tensorflow-fred,asimshankar/tensorflow,dongjoon-hyun/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,chemelnucfin/tensorflow,aselle/tensorflow,chemelnucfin/tensorflow,jendap/tensorflow
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
Add a few naming guidelines for the converter library.
PiperOrigin-RevId: 204199604
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
<commit_before># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
<commit_msg>Add a few naming guidelines for the converter library.
PiperOrigin-RevId: 204199604<commit_after>
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
Add a few naming guidelines for the converter library.
PiperOrigin-RevId: 204199604# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
<commit_before># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Define a base transformer class that can recognize skip_processing
# TODO(mdan): All converters are incomplete, especially those that change blocks
<commit_msg>Add a few naming guidelines for the converter library.
PiperOrigin-RevId: 204199604<commit_after># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code converters used by Autograph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Naming conventions:
# * each converter should specialize on a single idiom; be consistent with
# the Python reference for naming
# * all converters inherit core.converter.Base
# * module names describe the idiom that the converter covers, plural
# * the converter class is named consistent with the module, singular and
# includes the word Transformer
#
# Example:
#
# lists.py
# class ListTransformer(converter.Base)
|
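A skeleton that follows the stated conventions, sketched under the assumption that converter.Base is an AST NodeTransformer-style class imported from the core package (the visitor hook and import path are illustrative, not confirmed by this record):
# lists.py: hypothetical module following the naming guidelines
from tensorflow.contrib.autograph.core import converter
class ListTransformer(converter.Base):
  """Specializes on list idioms, per the conventions above."""
  def visit_List(self, node):
    node = self.generic_visit(node)
    # A real converter would rewrite the list construction here.
    return node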
e2b3934dfa9793759802afb2204d5068f21fc2d1
|
websockets/test_handshake.py
|
websockets/test_handshake.py
|
import unittest
from .handshake import *
from .handshake import accept # private API
class HandshakeTests(unittest.TestCase):
def test_accept(self):
# Test vector from RFC 6455
key = "dGhlIHNhbXBsZSBub25jZQ=="
acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
self.assertEqual(accept(key), acc)
def test_round_trip(self):
request_headers = {}
request_key = build_request(request_headers.__setitem__)
response_key = check_request(request_headers.__getitem__)
self.assertEqual(request_key, response_key)
response_headers = {}
build_response(response_headers.__setitem__, response_key)
check_response(response_headers.__getitem__, request_key)
def test_bad_request(self):
headers = {}
build_request(headers.__setitem__)
del headers['Sec-WebSocket-Key']
with self.assertRaises(InvalidHandshake):
check_request(headers.__getitem__)
def test_bad_response(self):
headers = {}
build_response(headers.__setitem__, 'blabla')
del headers['Sec-WebSocket-Accept']
with self.assertRaises(InvalidHandshake):
check_response(headers.__getitem__, 'blabla')
|
Add tests for the handshake functions.
|
Add tests for the handshake functions.
|
Python
|
bsd-3-clause
|
aaugustin/websockets,aaugustin/websockets,aaugustin/websockets,dommert/pywebsockets,aaugustin/websockets,andrewyoung1991/websockets,biddyweb/websockets
|
Add tests for the handshake functions.
|
import unittest
from .handshake import *
from .handshake import accept # private API
class HandshakeTests(unittest.TestCase):
def test_accept(self):
# Test vector from RFC 6455
key = "dGhlIHNhbXBsZSBub25jZQ=="
acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
self.assertEqual(accept(key), acc)
def test_round_trip(self):
request_headers = {}
request_key = build_request(request_headers.__setitem__)
response_key = check_request(request_headers.__getitem__)
self.assertEqual(request_key, response_key)
response_headers = {}
build_response(response_headers.__setitem__, response_key)
check_response(response_headers.__getitem__, request_key)
def test_bad_request(self):
headers = {}
build_request(headers.__setitem__)
del headers['Sec-WebSocket-Key']
with self.assertRaises(InvalidHandshake):
check_request(headers.__getitem__)
def test_bad_response(self):
headers = {}
build_response(headers.__setitem__, 'blabla')
del headers['Sec-WebSocket-Accept']
with self.assertRaises(InvalidHandshake):
check_response(headers.__getitem__, 'blabla')
|
<commit_before><commit_msg>Add tests for the handshake functions.<commit_after>
|
import unittest
from .handshake import *
from .handshake import accept # private API
class HandshakeTests(unittest.TestCase):
def test_accept(self):
# Test vector from RFC 6455
key = "dGhlIHNhbXBsZSBub25jZQ=="
acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
self.assertEqual(accept(key), acc)
def test_round_trip(self):
request_headers = {}
request_key = build_request(request_headers.__setitem__)
response_key = check_request(request_headers.__getitem__)
self.assertEqual(request_key, response_key)
response_headers = {}
build_response(response_headers.__setitem__, response_key)
check_response(response_headers.__getitem__, request_key)
def test_bad_request(self):
headers = {}
build_request(headers.__setitem__)
del headers['Sec-WebSocket-Key']
with self.assertRaises(InvalidHandshake):
check_request(headers.__getitem__)
def test_bad_response(self):
headers = {}
build_response(headers.__setitem__, 'blabla')
del headers['Sec-WebSocket-Accept']
with self.assertRaises(InvalidHandshake):
check_response(headers.__getitem__, 'blabla')
|
Add tests for the handshake functions.import unittest
from .handshake import *
from .handshake import accept # private API
class HandshakeTests(unittest.TestCase):
def test_accept(self):
# Test vector from RFC 6455
key = "dGhlIHNhbXBsZSBub25jZQ=="
acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
self.assertEqual(accept(key), acc)
def test_round_trip(self):
request_headers = {}
request_key = build_request(request_headers.__setitem__)
response_key = check_request(request_headers.__getitem__)
self.assertEqual(request_key, response_key)
response_headers = {}
build_response(response_headers.__setitem__, response_key)
check_response(response_headers.__getitem__, request_key)
def test_bad_request(self):
headers = {}
build_request(headers.__setitem__)
del headers['Sec-WebSocket-Key']
with self.assertRaises(InvalidHandshake):
check_request(headers.__getitem__)
def test_bad_response(self):
headers = {}
build_response(headers.__setitem__, 'blabla')
del headers['Sec-WebSocket-Accept']
with self.assertRaises(InvalidHandshake):
check_response(headers.__getitem__, 'blabla')
|
<commit_before><commit_msg>Add tests for the handshake functions.<commit_after>import unittest
from .handshake import *
from .handshake import accept # private API
class HandshakeTests(unittest.TestCase):
def test_accept(self):
# Test vector from RFC 6455
key = "dGhlIHNhbXBsZSBub25jZQ=="
acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
self.assertEqual(accept(key), acc)
def test_round_trip(self):
request_headers = {}
request_key = build_request(request_headers.__setitem__)
response_key = check_request(request_headers.__getitem__)
self.assertEqual(request_key, response_key)
response_headers = {}
build_response(response_headers.__setitem__, response_key)
check_response(response_headers.__getitem__, request_key)
def test_bad_request(self):
headers = {}
build_request(headers.__setitem__)
del headers['Sec-WebSocket-Key']
with self.assertRaises(InvalidHandshake):
check_request(headers.__getitem__)
def test_bad_response(self):
headers = {}
build_response(headers.__setitem__, 'blabla')
del headers['Sec-WebSocket-Accept']
with self.assertRaises(InvalidHandshake):
check_response(headers.__getitem__, 'blabla')
|
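The RFC 6455 test vector pins down what the private accept() must compute: the SHA-1 of the key concatenated with a fixed GUID, base64-encoded. A standalone sketch (the library's own implementation may differ in detail):
import base64
import hashlib
GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'  # fixed by RFC 6455
def accept(key):
    # Sec-WebSocket-Accept = base64(SHA-1(key + GUID))
    digest = hashlib.sha1((key + GUID).encode()).digest()
    return base64.b64encode(digest).decode()
assert accept('dGhlIHNhbXBsZSBub25jZQ==') == 's3pPLMBiTxaQ9kYGzzhZRbK+xOo='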
|
bae805afa67bbced9a968b12c38e1b22e05f8f61
|
Findminn.py
|
Findminn.py
|
__author__ = "Claytonbat"
from random import randrange
import time
def findMin(alist):
overallmin = alist[0]
for i in alist:
issmallest = True
for j in alist :
if i > j:
issmallest = False
if issmallest:
overallmin = i
return overallmin
def findMin1(alist):
temp_min = alist[0]
for i in alist:
if temp_min > i:
temp_min = i
return temp_min
#print(findMin([5,4,3,2,1,0]))
for listSize in range(1000,10001,1000):
alist = [randrange(100000) for x in range(listSize)]
start = time.time()
print (findMin(alist))
end = time.time()
start1 = time.time()
print (findMin1(alist))
end1 = time.time()
print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
|
Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list.
|
Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list.
Add author and email.
|
Python
|
mit
|
mcsoo/Exercises
|
Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list.
Add author and email.
|
__author__ = "Claytonbat"
from random import randrange
import time
def findMin(alist):
overallmin = alist[0]
for i in alist:
issmallest = True
for j in alist :
if i > j:
issmallest = False
if issmallest:
overallmin = i
return overallmin
def findMin1(alist):
temp_min = alist[0]
for i in alist:
if temp_min > i:
temp_min = i
return temp_min
#print(findMin([5,4,3,2,1,0]))
for listSize in range(1000,10001,1000):
alist = [randrange(100000) for x in range(listSize)]
start = time.time()
print (findMin(alist))
end = time.time()
start1 = time.time()
print (findMin1(alist))
end1 = time.time()
print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
|
<commit_before><commit_msg>Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list.
Add author and email.<commit_after>
|
__author__ = "Claytonbat"
from random import randrange
import time
def findMin(alist):
overallmin = alist[0]
for i in alist:
issmallest = True
for j in alist :
if i > j:
issmallest = False
if issmallest:
overallmin = i
return overallmin
def findMin1(alist):
temp_min = alist[0]
for i in alist:
if temp_min > i:
temp_min = i
return temp_min
#print(findMin([5,4,3,2,1,0]))
for listSize in range(1000,10001,1000):
alist = [randrange(100000) for x in range(listSize)]
start = time.time()
print (findMin(alist))
end = time.time()
start1 = time.time()
print (findMin1(alist))
end1 = time.time()
print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
|
Add FindMinn.py file, which contains O(n) and O(n**2) methods for finding the minimum number in a random list.
Add author and email.__author__ = "Claytonbat"
from random import randrange
import time
def findMin(alist):
overallmin = alist[0]
for i in alist:
issmallest = True
for j in alist :
if i > j:
issmallest = False
if issmallest:
overallmin = i
return overallmin
def findMin1(alist):
temp_min = alist[0]
for i in alist:
if temp_min > i:
temp_min = i
return temp_min
#print(findMin([5,4,3,2,1,0]))
for listSize in range(1000,10001,1000):
alist = [randrange(100000) for x in range(listSize)]
start = time.time()
print (findMin(alist))
end = time.time()
start1 = time.time()
print (findMin1(alist))
end1 = time.time()
print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
|
<commit_before><commit_msg>Add FindMinn.py file, which contains O(n) and O(n**2) methods for finding the minimum number in a random list.
Add author and email.<commit_after>__author__ = "Claytonbat"
from random import randrange
import time
def findMin(alist):
overallmin = alist[0]
for i in alist:
issmallest = True
for j in alist :
if i > j:
issmallest = False
if issmallest:
overallmin = i
return overallmin
def findMin1(alist):
temp_min = alist[0]
for i in alist:
if temp_min > i:
temp_min = i
return temp_min
#print(findMin([5,4,3,2,1,0]))
for listSize in range(1000,10001,1000):
alist = [randrange(100000) for x in range(listSize)]
start = time.time()
print (findMin(alist))
end = time.time()
start1 = time.time()
print (findMin1(alist))
end1 = time.time()
print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
|
|
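For comparison with the two hand-written searches in the record above, Python's built-in min() performs the same single O(n) pass. A minimal sketch, reusing the script's list construction:

import time
from random import randrange

alist = [randrange(100000) for _ in range(10000)]
start = time.time()
smallest = min(alist)  # built-in linear scan, O(n)
elapsed = time.time() - start
print("min: %d found in %f s" % (smallest, elapsed))

On the same list sizes it will typically beat both findMin variants, since the loop runs in C rather than in the interpreter.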
cd74d2af4d0d09f8ba2b6aefffde14cc233740b7
|
samples/python/chlauth_set.py
|
samples/python/chlauth_set.py
|
'''
This sample will set a channel authentication record.
MQWeb runs on localhost and is listening on port 8081.
'''
import sys
import json
import httplib
import socket
import argparse
parser = argparse.ArgumentParser(
description='MQWeb - Python sample - Set Channel Authentication Record',
epilog="For more information: http://www.mqweb.org"
)
parser.add_argument('-m', '--queuemanager', help='Name of the queuemanager', required=True)
parser.add_argument('-p', '--profile', help='Name of the channel', required=True)
parser.add_argument('-a', '--address', help='The address of the client', required=True)
parser.add_argument('-mca', '--mcauser', help='The MCAUser id', required=True)
parser.add_argument('-c', '--clientuser', help='The client user id', required=True)
args = parser.parse_args()
url = "/api/chlauth/set/" + args.queuemanager
input = {
'ChannelName' : args.profile,
'Type' : 'UserMap',
'ConnectionName' : args.address,
'MCAUserIdentifier' : args.mcauser,
'ClientUserIdentifier' : args.clientuser,
'Action' : 'Replace',
'UserSrc' : 'Map'
}
try:
headers = {
'Content-Type': 'application/json'
}
conn = httplib.HTTPConnection('localhost', 8081)
conn.request('POST', url, json.dumps(input), headers)
res = conn.getresponse()
result = json.loads(res.read())
print(result)
if 'error' in result:
print ('Received a WebSphere MQ error: ' +
str(result['error']['reason']['code']) + ' - ' +
result['error']['reason']['desc']
)
else:
print('Channel authentication records added for ' + args.profile + ' on ' + args.queuemanager)
except httplib.HTTPException as e:
print ('An HTTP error occurred while setting channel authentication records: ' +
e.errno + e.strerror
)
except socket.error as e:
print e.strerror
print 'Is the MQWeb daemon running?'
|
Add sample for set chlauth
|
Add sample for set chlauth
|
Python
|
mit
|
fbraem/mqweb,fbraem/mqweb,fbraem/mqweb
|
Add sample for set chlauth
|
'''
This sample will set a channel authentication record.
MQWeb runs on localhost and is listening on port 8081.
'''
import sys
import json
import httplib
import socket
import argparse
parser = argparse.ArgumentParser(
description='MQWeb - Python sample - Set Channel Authentication Record',
epilog="For more information: http://www.mqweb.org"
)
parser.add_argument('-m', '--queuemanager', help='Name of the queuemanager', required=True)
parser.add_argument('-p', '--profile', help='Name of the channel', required=True)
parser.add_argument('-a', '--address', help='The address of the client', required=True)
parser.add_argument('-mca', '--mcauser', help='The MCAUser id', required=True)
parser.add_argument('-c', '--clientuser', help='The client user id', required=True)
args = parser.parse_args()
url = "/api/chlauth/set/" + args.queuemanager
input = {
'ChannelName' : args.profile,
'Type' : 'UserMap',
'ConnectionName' : args.address,
'MCAUserIdentifier' : args.mcauser,
'ClientUserIdentifier' : args.clientuser,
'Action' : 'Replace',
'UserSrc' : 'Map'
}
try:
headers = {
'Content-Type': 'application/json'
}
conn = httplib.HTTPConnection('localhost', 8081)
conn.request('POST', url, json.dumps(input), headers)
res = conn.getresponse()
result = json.loads(res.read())
print(result)
if 'error' in result:
print ('Received a WebSphere MQ error: ' +
str(result['error']['reason']['code']) + ' - ' +
result['error']['reason']['desc']
)
else:
print('Channel authentication records added for ' + args.profile + ' on ' + args.queuemanager)
except httplib.HTTPException as e:
print ('An HTTP error occurred while setting channel authentication records: ' +
e.errno + e.strerror
)
except socket.error as e:
print e.strerror
print 'Is the MQWeb daemon running?'
|
<commit_before><commit_msg>Add sample for set chlauth<commit_after>
|
'''
This sample will set a channel authentication record.
MQWeb runs on localhost and is listening on port 8081.
'''
import sys
import json
import httplib
import socket
import argparse
parser = argparse.ArgumentParser(
description='MQWeb - Python sample - Set Channel Authentication Record',
epilog="For more information: http://www.mqweb.org"
)
parser.add_argument('-m', '--queuemanager', help='Name of the queuemanager', required=True)
parser.add_argument('-p', '--profile', help='Name of the channel', required=True)
parser.add_argument('-a', '--address', help='The address of the client', required=True)
parser.add_argument('-mca', '--mcauser', help='The MCAUser id', required=True)
parser.add_argument('-c', '--clientuser', help='The client user id', required=True)
args = parser.parse_args()
url = "/api/chlauth/set/" + args.queuemanager
input = {
'ChannelName' : args.profile,
'Type' : 'UserMap',
'ConnectionName' : args.address,
'MCAUserIdentifier' : args.mcauser,
'ClientUserIdentifier' : args.clientuser,
'Action' : 'Replace',
'UserSrc' : 'Map'
}
try:
headers = {
'Content-Type': 'application/json'
}
conn = httplib.HTTPConnection('localhost', 8081)
conn.request('POST', url, json.dumps(input), headers)
res = conn.getresponse()
result = json.loads(res.read())
print(result)
if 'error' in result:
print ('Received a WebSphere MQ error: ' +
str(result['error']['reason']['code']) + ' - ' +
result['error']['reason']['desc']
)
else:
print('Channel authentication records added for ' + args.profile + ' on ' + args.queuemanager)
except httplib.HTTPException as e:
print ('An HTTP error occurred while setting channel authentication records: ' +
e.errno + e.strerror
)
except socket.error as e:
print e.strerror
print 'Is the MQWeb daemon running?'
|
Add sample for set chlauth'''
This sample will set a channel authentication record.
MQWeb runs on localhost and is listening on port 8081.
'''
import sys
import json
import httplib
import socket
import argparse
parser = argparse.ArgumentParser(
description='MQWeb - Python sample - Set Channel Authentication Record',
epilog="For more information: http://www.mqweb.org"
)
parser.add_argument('-m', '--queuemanager', help='Name of the queuemanager', required=True)
parser.add_argument('-p', '--profile', help='Name of the channel', required=True)
parser.add_argument('-a', '--address', help='The address of the client', required=True)
parser.add_argument('-mca', '--mcauser', help='The MCAUser id', required=True)
parser.add_argument('-c', '--clientuser', help='The client user id', required=True)
args = parser.parse_args()
url = "/api/chlauth/set/" + args.queuemanager
input = {
'ChannelName' : args.profile,
'Type' : 'UserMap',
'ConnectionName' : args.address,
'MCAUserIdentifier' : args.mcauser,
'ClientUserIdentifier' : args.clientuser,
'Action' : 'Replace',
'UserSrc' : 'Map'
}
try:
headers = {
'Content-Type': 'application/json'
}
conn = httplib.HTTPConnection('localhost', 8081)
conn.request('POST', url, json.dumps(input), headers)
res = conn.getresponse()
result = json.loads(res.read())
print(result)
if 'error' in result:
print ('Received a WebSphere MQ error: ' +
str(result['error']['reason']['code']) + ' - ' +
result['error']['reason']['desc']
)
else:
print('Channel authentication records added for ' + args.profile + ' on ' + args.queuemanager)
except httplib.HTTPException as e:
print ('An HTTP error occurred while setting channel authentication records: ' +
e.errno + e.strerror
)
except socket.error as e:
print e.strerror
print 'Is the MQWeb daemon running?'
|
<commit_before><commit_msg>Add sample for set chlauth<commit_after>'''
This sample will set a channel authentication record.
MQWeb runs on localhost and is listening on port 8081.
'''
import sys
import json
import httplib
import socket
import argparse
parser = argparse.ArgumentParser(
description='MQWeb - Python sample - Set Channel Authentication Record',
epilog="For more information: http://www.mqweb.org"
)
parser.add_argument('-m', '--queuemanager', help='Name of the queuemanager', required=True)
parser.add_argument('-p', '--profile', help='Name of the channel', required=True)
parser.add_argument('-a', '--address', help='The address of the client', required=True)
parser.add_argument('-mca', '--mcauser', help='The MCAUser id', required=True)
parser.add_argument('-c', '--clientuser', help='The client user id', required=True)
args = parser.parse_args()
url = "/api/chlauth/set/" + args.queuemanager
input = {
'ChannelName' : args.profile,
'Type' : 'UserMap',
'ConnectionName' : args.address,
'MCAUserIdentifier' : args.mcauser,
'ClientUserIdentifier' : args.clientuser,
'Action' : 'Replace',
'UserSrc' : 'Map'
}
try:
headers = {
'Content-Type': 'application/json'
}
conn = httplib.HTTPConnection('localhost', 8081)
conn.request('POST', url, json.dumps(input), headers)
res = conn.getresponse()
result = json.loads(res.read())
print(result)
if 'error' in result:
print ('Received a WebSphere MQ error: ' +
str(result['error']['reason']['code']) + ' - ' +
result['error']['reason']['desc']
)
else:
print('Channel authentication records added for ' + args.profile + ' on ' + args.queuemanager)
except httplib.HTTPException as e:
print ('An HTTP error occurred while setting channel authentication records: ' +
e.errno + e.strerror
)
except socket.error as e:
print e.strerror
print 'Is the MQWeb daemon running?'
|
|
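The sample above is Python 2 code (httplib, print statements). Under Python 3 the same POST can be issued with http.client; below is a minimal sketch that keeps the endpoint and payload shape from the sample, with illustrative values for the queue manager and channel. One caveat in the original: the HTTPException handler adds e.errno to e.strerror, attributes that httplib.HTTPException does not define, so printing str(e) is the safer choice there.

import http.client
import json

payload = {
    'ChannelName': 'SYSTEM.DEF.SVRCONN',  # illustrative values only,
    'ConnectionName': '127.0.0.1',        # not taken from a real setup
    'MCAUserIdentifier': 'mqm',
    'ClientUserIdentifier': 'guest',
    'Type': 'UserMap',
    'Action': 'Replace',
    'UserSrc': 'Map',
}
conn = http.client.HTTPConnection('localhost', 8081)
conn.request('POST', '/api/chlauth/set/QM1', json.dumps(payload),
             {'Content-Type': 'application/json'})
print(json.loads(conn.getresponse().read()))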
d56da91c5613b281ebd90a7391429ebac0d83159
|
scripts/process_results.py
|
scripts/process_results.py
|
import os
import pandas as pd
import sqlite3
import json
input_path=''
input_filenames = ["0101000000.sql", "1000000000.sql"]
def sql_query(input_path, input_filename, report_name, table_name, row_name, column_name):
sql_query = 'SELECT Value from TabularDataWithStrings WHERE ReportName=\'' + report_name + '\''
#sql_query += ' and ReportForString=\'' + report_for_string + '\''
sql_query += ' and TableName=\'' + table_name + '\''
sql_query += ' and ColumnName=\'' + column_name + '\''
sql_query += ' and RowName=\'' + row_name + '\''
sql_query += ';'
print sql_query
con = sqlite3.connect(os.path.join(input_path, input_filename))
df = pd.read_sql_query(sql_query, con)
con.close()
return float(df.iloc[0]['Value'])
for input_filename in input_filenames:
case_dict = {}
eui = sql_query(input_path, input_filename, 'AnnualBuildingUtilityPerformanceSummary','Site and Source Energy','Total Site Energy','Energy Per Total Building Area')
cooling = sql_query(input_path, input_filename, 'ComponentSizingSummary', 'DistrictCooling', 'DISTRICT COOLING', 'Design Size Nominal Capacity')
heating = sql_query(input_path, input_filename, 'ComponentSizingSummary','DistrictHeating','DISTRICT HEATING','Design Size Nominal Capacity')
case_dict.update({
'eui': eui,
'cooling': cooling,
'heating': heating
})
with open(os.path.splitext(input_filename)[0] + '.json', 'w') as outfile:
json.dump(case_dict, outfile)
|
Add script to process EnergyPlus SQL and create json result files.
|
Add script to process EnergyPlus SQL and create json result files.
|
Python
|
mit
|
design-generator/data,design-generator/data
|
Add script to process EnergyPlus SQL and create json result files.
|
import os
import pandas as pd
import sqlite3
import json
input_path=''
input_filenames = ["0101000000.sql", "1000000000.sql"]
def sql_query(input_path, input_filename, report_name, table_name, row_name, column_name):
sql_query = 'SELECT Value from TabularDataWithStrings WHERE ReportName=\'' + report_name + '\''
#sql_query += ' and ReportForString=\'' + report_for_string + '\''
sql_query += ' and TableName=\'' + table_name + '\''
sql_query += ' and ColumnName=\'' + column_name + '\''
sql_query += ' and RowName=\'' + row_name + '\''
sql_query += ';'
print sql_query
con = sqlite3.connect(os.path.join(input_path, input_filename))
df = pd.read_sql_query(sql_query, con)
con.close()
return float(df.iloc[0]['Value'])
for input_filename in input_filenames:
case_dict = {}
eui = sql_query(input_path, input_filename, 'AnnualBuildingUtilityPerformanceSummary','Site and Source Energy','Total Site Energy','Energy Per Total Building Area')
cooling = sql_query(input_path, input_filename, 'ComponentSizingSummary', 'DistrictCooling', 'DISTRICT COOLING', 'Design Size Nominal Capacity')
heating = sql_query(input_path, input_filename, 'ComponentSizingSummary','DistrictHeating','DISTRICT HEATING','Design Size Nominal Capacity')
case_dict.update({
'eui': eui,
'cooling': cooling,
'heating': heating
})
with open(os.path.splitext(input_filename)[0] + '.json', 'w') as outfile:
json.dump(case_dict, outfile)
|
<commit_before><commit_msg>Add script to process EnergyPlus SQL and create json result files.<commit_after>
|
import os
import pandas as pd
import sqlite3
import json
input_path=''
input_filenames = ["0101000000.sql", "1000000000.sql"]
def sql_query(input_path, input_filename, report_name, table_name, row_name, column_name):
sql_query = 'SELECT Value from TabularDataWithStrings WHERE ReportName=\'' + report_name + '\''
#sql_query += ' and ReportForString=\'' + report_for_string + '\''
sql_query += ' and TableName=\'' + table_name + '\''
sql_query += ' and ColumnName=\'' + column_name + '\''
sql_query += ' and RowName=\'' + row_name + '\''
sql_query += ';'
print sql_query
con = sqlite3.connect(os.path.join(input_path, input_filename))
df = pd.read_sql_query(sql_query, con)
con.close()
return float(df.iloc[0]['Value'])
for input_filename in input_filenames:
case_dict = {}
eui = sql_query(input_path, input_filename, 'AnnualBuildingUtilityPerformanceSummary','Site and Source Energy','Total Site Energy','Energy Per Total Building Area')
cooling = sql_query(input_path, input_filename, 'ComponentSizingSummary', 'DistrictCooling', 'DISTRICT COOLING', 'Design Size Nominal Capacity')
heating = sql_query(input_path, input_filename, 'ComponentSizingSummary','DistrictHeating','DISTRICT HEATING','Design Size Nominal Capacity')
case_dict.update({
'eui': eui,
'cooling': cooling,
'heating': heating
})
with open(os.path.splitext(input_filename)[0] + '.json', 'w') as outfile:
json.dump(case_dict, outfile)
|
Add script to process EnergyPlus SQL and create json result files.
import os
import pandas as pd
import sqlite3
import json
input_path=''
input_filenames = ["0101000000.sql", "1000000000.sql"]
def sql_query(input_path, input_filename, report_name, table_name, row_name, column_name):
sql_query = 'SELECT Value from TabularDataWithStrings WHERE ReportName=\'' + report_name + '\''
#sql_query += ' and ReportForString=\'' + report_for_string + '\''
sql_query += ' and TableName=\'' + table_name + '\''
sql_query += ' and ColumnName=\'' + column_name + '\''
sql_query += ' and RowName=\'' + row_name + '\''
sql_query += ';'
print sql_query
con = sqlite3.connect(os.path.join(input_path, input_filename))
df = pd.read_sql_query(sql_query, con)
con.close()
return float(df.iloc[0]['Value'])
for input_filename in input_filenames:
case_dict = {}
eui = sql_query(input_path, input_filename, 'AnnualBuildingUtilityPerformanceSummary','Site and Source Energy','Total Site Energy','Energy Per Total Building Area')
cooling = sql_query(input_path, input_filename, 'ComponentSizingSummary', 'DistrictCooling', 'DISTRICT COOLING', 'Design Size Nominal Capacity')
heating = sql_query(input_path, input_filename, 'ComponentSizingSummary','DistrictHeating','DISTRICT HEATING','Design Size Nominal Capacity')
case_dict.update({
'eui': eui,
'cooling': cooling,
'heating': heating
})
with open(os.path.splitext(input_filename)[0] + '.json', 'w') as outfile:
json.dump(case_dict, outfile)
|
<commit_before><commit_msg>Add script to process EnergyPlus SQL and create json result files.<commit_after>
import os
import pandas as pd
import sqlite3
import json
input_path=''
input_filenames = ["0101000000.sql", "1000000000.sql"]
def sql_query(input_path, input_filename, report_name, table_name, row_name, column_name):
sql_query = 'SELECT Value from TabularDataWithStrings WHERE ReportName=\'' + report_name + '\''
#sql_query += ' and ReportForString=\'' + report_for_string + '\''
sql_query += ' and TableName=\'' + table_name + '\''
sql_query += ' and ColumnName=\'' + column_name + '\''
sql_query += ' and RowName=\'' + row_name + '\''
sql_query += ';'
print sql_query
con = sqlite3.connect(os.path.join(input_path, input_filename))
df = pd.read_sql_query(sql_query, con)
con.close()
return float(df.iloc[0]['Value'])
for input_filename in input_filenames:
case_dict = {}
eui = sql_query(input_path, input_filename, 'AnnualBuildingUtilityPerformanceSummary','Site and Source Energy','Total Site Energy','Energy Per Total Building Area')
cooling = sql_query(input_path, input_filename, 'ComponentSizingSummary', 'DistrictCooling', 'DISTRICT COOLING', 'Design Size Nominal Capacity')
heating = sql_query(input_path, input_filename, 'ComponentSizingSummary','DistrictHeating','DISTRICT HEATING','Design Size Nominal Capacity')
case_dict.update({
'eui': eui,
'cooling': cooling,
'heating': heating
})
with open(os.path.splitext(input_filename)[0] + '.json', 'w') as outfile:
json.dump(case_dict, outfile)
|
|
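One refinement worth noting for the script above: the report, table, row and column names are spliced into the SQL text by string concatenation. sqlite3 also accepts bound parameters, and pandas' read_sql_query forwards them through its params argument, which avoids manual quoting. A minimal sketch of the same lookup:

import sqlite3
import pandas as pd

def sql_query_params(db_path, report_name, table_name, row_name, column_name):
    query = ("SELECT Value FROM TabularDataWithStrings "
             "WHERE ReportName=? AND TableName=? AND RowName=? AND ColumnName=?")
    con = sqlite3.connect(db_path)
    try:
        # The driver binds the values, so no manual quoting is needed.
        df = pd.read_sql_query(
            query, con,
            params=(report_name, table_name, row_name, column_name))
    finally:
        con.close()
    return float(df.iloc[0]['Value'])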
f83cc3e33bf8fe3e80502e39768483db2f78a63f
|
addons/sale/__terp__.py
|
addons/sale/__terp__.py
|
{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml"
],
"active": False,
"installable": True
}
|
{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml",
"sale_security.xml"
],
"active": False,
"installable": True
}
|
Add sale_security.xml file entry in update_xml section
|
Add sale_security.xml file entry in update_xml section
bzr revid: mga@tinyerp.com-b44d9932402c4941921f83487b823e7359d324c0
|
Python
|
agpl-3.0
|
naousse/odoo,slevenhagen/odoo,rahuldhote/odoo,Drooids/odoo,gorjuce/odoo,jiachenning/odoo,cpyou/odoo,glovebx/odoo,acshan/odoo,joshuajan/odoo,Maspear/odoo,gsmartway/odoo,OpusVL/odoo,apanju/odoo,kifcaliph/odoo,Codefans-fan/odoo,luiseduardohdbackup/odoo,bluebottle/payments,hbrunn/OpenUpgrade,BT-rmartin/odoo,luiseduardohdbackup/odoo,rowemoore/odoo,tvtsoft/odoo8,QianBIG/odoo,bluebottle,onepercentclub,apocalypsebg/odoo,Daniel-CA/odoo,tvibliani/odoo,lgscofield/odoo,abstract-open-solutions/OCB,spadae22/odoo,feroda/odoo,Ichag/odoo,fossoult/odoo,Codefans-fan/odoo,Endika/odoo,ingadhoc/odoo,dllsf/odootest,funkring/fdoo,makinacorpus/odoo,Gitlab11/odoo,x111ong/odoo,ihsanudin/odoo,pedrobaeza/OpenUpgrade,rschnapka/odoo,fuhongliang/odoo,takis/odoo,gavin-feng/odoo,mlaitinen/odoo,BT-astauder/odoo,leoliujie/odoo,0k/odoo,poljeff/odoo,podemos-info/odoo,srimai/odoo,findMin/odoo,hifly/OpenUpgrade,shivam1111/odoo,alexcuellar/odoo,collex100/odoo,sysadminmatmoz/OCB,patmcb/odoo,waytai/odoo,lsinfo/odoo,ehirt/odoo,christophlsa/odoo,gdgellatly/OCB1,wangjun/odoo,Grirrane/odoo,bwrsandman/OpenUpgrade,havt/odoo,abenzbiria/clients_odoo,nhomar/odoo,savoirfairelinux/OpenUpgrade,sve-odoo/odoo,storm-computers/odoo,erkrishna9/odoo,Endika/OpenUpgrade,cpyou/odoo,rschnapka/odoo,savoirfairelinux/odoo,hbrunn/OpenUpgrade,FlorianLudwig/odoo,agrista/odoo-saas,ingadhoc/odoo,omprakasha/odoo,bealdav/OpenUpgrade,CatsAndDogsbvba/odoo,papouso/odoo,nhomar/odoo,patmcb/odoo,waytai/odoo,feroda/odoo,rgeleta/odoo,gorjuce/odoo,tvibliani/odoo,syci/OCB,colinnewell/odoo,hip-odoo/odoo,dalegregory/odoo,goliveirab/odoo,tinkhaven-organization/odoo,ovnicraft/odoo,gavin-feng/odoo,xzYue/odoo,zchking/odoo,nagyistoce/odoo-dev-odoo,fuselock/odoo,shingonoide/odoo,fuselock/odoo,alexteodor/odoo,SerpentCS/odoo,abdellatifkarroum/odoo,dllsf/odootest,blaggacao/OpenUpgrade,SAM-IT-SA/odoo,collex100/odoo,dkubiak789/odoo,frouty/odoogoeen,jeasoft/odoo,sysadminmatmoz/OCB,Eric-Zhong/odoo,hmen89/odoo,ClearCorp-dev/odoo,nexiles/odoo,slevenhagen/odoo,chiragjogi/odoo,javierTerry/odoo,laslabs/odoo,nhomar/odoo-mirror,eino-makitalo/odoo,gvb/odoo,stonegithubs/odoo,tinkhaven-organization/odoo,idncom/odoo,pplatek/odoo,xujb/odoo,odootr/odoo,cedk/odoo,salaria/odoo,bobisme/odoo,Elico-Corp/odoo_OCB,RafaelTorrealba/odoo,ojengwa/odoo,cysnake4713/odoo,poljeff/odoo,sv-dev1/odoo,CatsAndDogsbvba/odoo,Antiun/odoo,sergio-incaser/odoo,PongPi/isl-odoo,Danisan/odoo-1,fossoult/odoo,nexiles/odoo,andreparames/odoo,salaria/odoo,vrenaville/ngo-addons-backport,grap/OCB,odoousers2014/odoo,nuuuboo/odoo,dezynetechnologies/odoo,codekaki/odoo,0k/OpenUpgrade,bkirui/odoo,tvtsoft/odoo8,jiangzhixiao/odoo,elmerdpadilla/iv,xujb/odoo,vnsofthe/odoo,kittiu/odoo,VitalPet/odoo,collex100/odoo,ehirt/odoo,joariasl/odoo,fevxie/odoo,alexteodor/odoo,microcom/odoo,factorlibre/OCB,x111ong/odoo,florentx/OpenUpgrade,JonathanStein/odoo,sysadminmatmoz/OCB,alqfahad/odoo,omprakasha/odoo,leoliujie/odoo,shaufi/odoo,rgeleta/odoo,tangyiyong/odoo,idncom/odoo,lightcn/odoo,NeovaHealth/odoo,MarcosCommunity/odoo
|
{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml"
],
"active": False,
"installable": True
}
Add sale_security.xml file entry in update_xml section
bzr revid: mga@tinyerp.com-b44d9932402c4941921f83487b823e7359d324c0
|
{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml",
"sale_security.xml"
],
"active": False,
"installable": True
}
|
<commit_before>{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml"
],
"active": False,
"installable": True
}
<commit_msg>Add sale_security.xml file entry in update_xml section
bzr revid: mga@tinyerp.com-b44d9932402c4941921f83487b823e7359d324c0<commit_after>
|
{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml",
"sale_security.xml"
],
"active": False,
"installable": True
}
|
{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml"
],
"active": False,
"installable": True
}
Add sale_security.xml file entry in update_xml section
bzr revid: mga@tinyerp.com-b44d9932402c4941921f83487b823e7359d324c0{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml",
"sale_security.xml"
],
"active": False,
"installable": True
}
|
<commit_before>{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml"
],
"active": False,
"installable": True
}
<commit_msg>Add sale_security.xml file entry in update_xml section
bzr revid: mga@tinyerp.com-b44d9932402c4941921f83487b823e7359d324c0<commit_after>{
"name" : "Sales Management",
"version" : "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_sale.html",
"depends" : ["product", "stock", "mrp"],
"category" : "Generic Modules/Sales & Purchases",
"init_xml" : [],
"demo_xml" : ["sale_demo.xml", "sale_unit_test.xml"],
"description": """
The base module to manage quotations and sales orders.
* Workflow with validation steps:
- Quotation -> Sale order -> Invoice
* Invoicing methods:
- Invoice on order (before or after shipping)
- Invoice on delivery
- Invoice on timesheets
- Advance invoice
* Partners preferences (shipping, invoicing, incoterm, ...)
* Products stocks and prices
* Delivery methods:
- all at once, multi-parcel
- delivery costs
""",
"update_xml" : [
"sale_workflow.xml",
"sale_sequence.xml",
"sale_data.xml",
"sale_view.xml",
"sale_report.xml",
"sale_wizard.xml",
"stock_view.xml",
"sale_security.xml"
],
"active": False,
"installable": True
}
|
3ae053008cc8ab7b8bac2a392cd95a12fc38974e
|
examples/io.py
|
examples/io.py
|
import fiona
# This module contains examples of opening files to get feature collections in
# different ways.
#
# It is meant to be run from the distribution root, the directory containing
# setup.py.
#
# A ``path`` is always the ``open()`` function's first argument. It can be
# absolute or relative to the working directory. It is the only positional
# argument, though it is conventional to use the mode as a 2nd positional
# argument.
# 1. Opening a file with a single data layer (shapefiles, etc).
#
# args: path, mode
# kwds: none
#
# The relative path to a file on the filesystem is given and its single layer
# is selected implicitly (a shapefile has a single layer). The file is opened
# for reading (mode 'r'), but since this is the default, we'll omit it in
# following examples.
with fiona.open('docs/data/test_uk.shp', 'r') as c:
assert len(c) == 48
# 2. Opening a file with explicit layer selection (FileGDB, etc).
#
# args: path
# kwds: layer
#
# Same as above, but the layer is specified explicitly by name.
with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c:
assert len(c) == 48
# 3. Opening a directory for access to a single file.
#
# args: path
# kwds: layer
#
# Same as above but using the path to the directory containing the shapefile,
# specified explicitly by name.
with fiona.open('docs/data', layer="test_uk") as c:
assert len(c) == 48
# 4. Opening a single file within a zip archive.
#
# args: path
# kwds: vfs
#
# Open a file given its absolute path within a virtual filesystem. The VFS
# is given an Apache Commons VFS identifier. It may contain either an absolute
# path or a path relative to the working directory.
with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
# 5. Opening a directory within a zip archive to select a layer.
#
# args: path
# kwds: layer, vfs
#
# The most complicated case. As above, but specifying the root directory within
# the virtual filesystem as the path and the layer by name (combination of
# 4 and 3). It ought to be possible to open a file geodatabase within a zip
# file like this.
with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
|
Add module of IO examples.
|
Add module of IO examples.
Getting this into the manual is the next step.
|
Python
|
bsd-3-clause
|
perrygeo/Fiona,Toblerity/Fiona,Toblerity/Fiona,rbuffat/Fiona,perrygeo/Fiona,rbuffat/Fiona,johanvdw/Fiona
|
Add module of IO examples.
Getting this into the manual is the next step.
|
import fiona
# This module contains examples of opening files to get feature collections in
# different ways.
#
# It is meant to be run from the distribution root, the directory containing
# setup.py.
#
# A ``path`` is always the ``open()`` function's first argument. It can be
# absolute or relative to the working directory. It is the only positional
# argument, though it is conventional to use the mode as a 2nd positional
# argument.
# 1. Opening a file with a single data layer (shapefiles, etc).
#
# args: path, mode
# kwds: none
#
# The relative path to a file on the filesystem is given and its single layer
# is selected implicitly (a shapefile has a single layer). The file is opened
# for reading (mode 'r'), but since this is the default, we'll omit it in
# following examples.
with fiona.open('docs/data/test_uk.shp', 'r') as c:
assert len(c) == 48
# 2. Opening a file with explicit layer selection (FileGDB, etc).
#
# args: path
# kwds: layer
#
# Same as above, but the layer is specified explicitly by name.
with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c:
assert len(c) == 48
# 3. Opening a directory for access to a single file.
#
# args: path
# kwds: layer
#
# Same as above but using the path to the directory containing the shapefile,
# specified explicitly by name.
with fiona.open('docs/data', layer="test_uk") as c:
assert len(c) == 48
# 4. Opening a single file within a zip archive.
#
# args: path
# kwds: vfs
#
# Open a file given its absolute path within a virtual filesystem. The VFS
# is given an Apache Commons VFS identifier. It may contain either an absolute
# path or a path relative to the working directory.
with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
# 5. Opening a directory within a zip archive to select a layer.
#
# args: path
# kwds: layer, vfs
#
# The most complicated case. As above, but specifying the root directory within
# the virtual filesystem as the path and the layer by name (combination of
# 4 and 3). It ought to be possible to open a file geodatabase within a zip
# file like this.
with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
|
<commit_before><commit_msg>Add module of IO examples.
Getting this into the manual is the next step.<commit_after>
|
import fiona
# This module contains examples of opening files to get feature collections in
# different ways.
#
# It is meant to be run from the distribution root, the directory containing
# setup.py.
#
# A ``path`` is always the ``open()`` function's first argument. It can be
# absolute or relative to the working directory. It is the only positional
# argument, though it is conventional to use the mode as a 2nd positional
# argument.
# 1. Opening a file with a single data layer (shapefiles, etc).
#
# args: path, mode
# kwds: none
#
# The relative path to a file on the filesystem is given and its single layer
# is selected implicitly (a shapefile has a single layer). The file is opened
# for reading (mode 'r'), but since this is the default, we'll omit it in
# following examples.
with fiona.open('docs/data/test_uk.shp', 'r') as c:
assert len(c) == 48
# 2. Opening a file with explicit layer selection (FileGDB, etc).
#
# args: path
# kwds: layer
#
# Same as above, but the layer is specified explicitly by name.
with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c:
assert len(c) == 48
# 3. Opening a directory for access to a single file.
#
# args: path
# kwds: layer
#
# Same as above but using the path to the directory containing the shapefile,
# specified explicitly by name.
with fiona.open('docs/data', layer="test_uk") as c:
assert len(c) == 48
# 4. Opening a single file within a zip archive.
#
# args: path
# kwds: vfs
#
# Open a file given its absolute path within a virtual filesystem. The VFS
# is given an Apache Commons VFS identifier. It may contain either an absolute
# path or a path relative to the working directory.
with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
# 5. Opening a directory within a zip archive to select a layer.
#
# args: path
# kwds: layer, vfs
#
# The most complicated case. As above, but specifying the root directory within
# the virtual filesystem as the path and the layer by name (combination of
# 4 and 3). It ought to be possible to open a file geodatabase within a zip
# file like this.
with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
|
Add module of IO examples.
Getting this into the manual is the next step.
import fiona
# This module contains examples of opening files to get feature collections in
# different ways.
#
# It is meant to be run from the distribution root, the directory containing
# setup.py.
#
# A ``path`` is always the ``open()`` function's first argument. It can be
# absolute or relative to the working directory. It is the only positional
# argument, though it is conventional to use the mode as a 2nd positional
# argument.
# 1. Opening a file with a single data layer (shapefiles, etc).
#
# args: path, mode
# kwds: none
#
# The relative path to a file on the filesystem is given and its single layer
# is selected implicitly (a shapefile has a single layer). The file is opened
# for reading (mode 'r'), but since this is the default, we'll omit it in
# following examples.
with fiona.open('docs/data/test_uk.shp', 'r') as c:
assert len(c) == 48
# 2. Opening a file with explicit layer selection (FileGDB, etc).
#
# args: path
# kwds: layer
#
# Same as above, but the layer is specified explicitly by name.
with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c:
assert len(c) == 48
# 3. Opening a directory for access to a single file.
#
# args: path
# kwds: layer
#
# Same as above but using the path to the directory containing the shapefile,
# specified explicitly by name.
with fiona.open('docs/data', layer="test_uk") as c:
assert len(c) == 48
# 4. Opening a single file within a zip archive.
#
# args: path
# kwds: vfs
#
# Open a file given its absolute path within a virtual filesystem. The VFS
# is given an Apache Commons VFS identifier. It may contain either an absolute
# path or a path relative to the working directory.
with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
# 5. Opening a directory within a zip archive to select a layer.
#
# args: path
# kwds: layer, vfs
#
# The most complicated case. As above, but specifying the root directory within
# the virtual filesystem as the path and the layer by name (combination of
# 4 and 3). It ought to be possible to open a file geodatabase within a zip
# file like this.
with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
|
<commit_before><commit_msg>Add module of IO examples.
Getting this into the manual is the next step.<commit_after>
import fiona
# This module contains examples of opening files to get feature collections in
# different ways.
#
# It is meant to be run from the distribution root, the directory containing
# setup.py.
#
# A ``path`` is always the ``open()`` function's first argument. It can be
# absolute or relative to the working directory. It is the only positional
# argument, though it is conventional to use the mode as a 2nd positional
# argument.
# 1. Opening a file with a single data layer (shapefiles, etc).
#
# args: path, mode
# kwds: none
#
# The relative path to a file on the filesystem is given and its single layer
# is selected implicitly (a shapefile has a single layer). The file is opened
# for reading (mode 'r'), but since this is the default, we'll omit it in
# following examples.
with fiona.open('docs/data/test_uk.shp', 'r') as c:
assert len(c) == 48
# 2. Opening a file with explicit layer selection (FileGDB, etc).
#
# args: path
# kwds: layer
#
# Same as above, but the layer is specified explicitly by name.
with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c:
assert len(c) == 48
# 3. Opening a directory for access to a single file.
#
# args: path
# kwds: layer
#
# Same as above but using the path to the directory containing the shapefile,
# specified explicitly by name.
with fiona.open('docs/data', layer="test_uk") as c:
assert len(c) == 48
# 4. Opening a single file within a zip archive.
#
# args: path
# kwds: vfs
#
# Open a file given its absolute path within a virtual filesystem. The VFS
# is given an Apache Commons VFS identifier. It may contain either an absolute
# path or a path relative to the working directory.
with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
# 5. Opening a directory within a zip archive to select a layer.
#
# args: path
# kwds: layer, vfs
#
# The most complicated case. As above, but specifying the root directory within
# the virtual filesystem as the path and the layer by name (combination of
# 4 and 3). It ought to be possible to open a file geodatabase within a zip
# file like this.
with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c:
assert len(c) == 48
|
|
92a9cf771a60d377ee2f7dce5f50bf5be9033b19
|
src/qinfer/test_tomography_qubit.py
|
src/qinfer/test_tomography_qubit.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 18:53:24 2012
@author: csferrie
"""
## FEATURES ####################################################################
from __future__ import division
## IMPORTS #####################################################################
import getpass
import sys
sys.path.append({
'csferrie': 'C:/Users/csferrie/Documents/GitHub/python-qinfer/src/',
'cgranade': '/home/cgranade/academics/software-projects/python-qinfer/src/'
}[getpass.getuser()])
import numpy as np
from qinfer import tomography, smc
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import time
if __name__ == "__main__":
N_PARTICLES = 250
# Model and prior initialization
prior = tomography.HaarUniform()
model = tomography.QubitStatePauliModel()
expparams = np.array([1])
# SMC initialization
updater = smc.SMCUpdater(model, N_PARTICLES, prior,resample_a=0.98, resample_thresh=0)
tic = toc = None
# Sample true set of modelparams
truemp = np.array([prior.sample()])
# Get all Bayesian up in here
n_exp = 2
tic = time.time()
for idx_exp in xrange(n_exp):
outcome = model.simulate_experiment(truemp, expparams)
updater.update(outcome, expparams)
toc = time.time() - tic
print "True param: {}".format(truemp)
print "Est. mean: {}".format(updater.est_mean())
print "Est. cov: {}".format(updater.est_covariance_mtx())
print "Error: {}".format(np.sum(np.abs(truemp[0]-updater.est_mean())**2))
print "Trace Cov: {}".format(np.trace(updater.est_covariance_mtx()))
print "Resample count: {}".format(updater.resample_count)
print "Elapsed time: {}".format(toc)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
particles = updater.particle_locations
weights = updater.particle_weights
minweight = min(weights)
maxweight = max(weights)
ax.scatter(particles[:,0],particles[:,1],particles[:,2], s = 100*weights/maxweight)
ax.scatter(truemp[:,0],truemp[:,1],truemp[:,2],c = 'red')
plt.show()
|
Add testing code for tomography module
|
Add testing code for tomography module
|
Python
|
agpl-3.0
|
Alan-Robertson/python-qinfer,csferrie/python-qinfer,QInfer/python-qinfer,whitewhim2718/python-qinfer,MichalKononenko/python-qinfer,ihincks/python-qinfer
|
Add testing code for tomography module
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 18:53:24 2012
@author: csferrie
"""
## FEATURES ####################################################################
from __future__ import division
## IMPORTS #####################################################################
import getpass
import sys
sys.path.append({
'csferrie': 'C:/Users/csferrie/Documents/GitHub/python-qinfer/src/',
'cgranade': '/home/cgranade/academics/software-projects/python-qinfer/src/'
}[getpass.getuser()])
import numpy as np
from qinfer import tomography, smc
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import time
if __name__ == "__main__":
N_PARTICLES = 250
# Model and prior initialization
prior = tomography.HaarUniform()
model = tomography.QubitStatePauliModel()
expparams = np.array([1])
# SMC initialization
updater = smc.SMCUpdater(model, N_PARTICLES, prior,resample_a=0.98, resample_thresh=0)
tic = toc = None
# Sample true set of modelparams
truemp = np.array([prior.sample()])
# Get all Bayesian up in here
n_exp = 2
tic = time.time()
for idx_exp in xrange(n_exp):
outcome = model.simulate_experiment(truemp, expparams)
updater.update(outcome, expparams)
toc = time.time() - tic
print "True param: {}".format(truemp)
print "Est. mean: {}".format(updater.est_mean())
print "Est. cov: {}".format(updater.est_covariance_mtx())
print "Error: {}".format(np.sum(np.abs(truemp[0]-updater.est_mean())**2))
print "Trace Cov: {}".format(np.trace(updater.est_covariance_mtx()))
print "Resample count: {}".format(updater.resample_count)
print "Elapsed time: {}".format(toc)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
particles = updater.particle_locations
weights = updater.particle_weights
minweight = min(weights)
maxweight = max(weights)
ax.scatter(particles[:,0],particles[:,1],particles[:,2], s = 100*weights/maxweight)
ax.scatter(truemp[:,0],truemp[:,1],truemp[:,2],c = 'red')
plt.show()
|
<commit_before><commit_msg>Add testing code for tomography module<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 18:53:24 2012
@author: csferrie
"""
## FEATURES ####################################################################
from __future__ import division
## IMPORTS #####################################################################
import getpass
import sys
sys.path.append({
'csferrie': 'C:/Users/csferrie/Documents/GitHub/python-qinfer/src/',
'cgranade': '/home/cgranade/academics/software-projects/python-qinfer/src/'
}[getpass.getuser()])
import numpy as np
from qinfer import tomography, smc
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import time
if __name__ == "__main__":
N_PARTICLES = 250
# Model and prior initialization
prior = tomography.HaarUniform()
model = tomography.QubitStatePauliModel()
expparams = np.array([1])
# SMC initialization
updater = smc.SMCUpdater(model, N_PARTICLES, prior,resample_a=0.98, resample_thresh=0)
tic = toc = None
# Sample true set of modelparams
truemp = np.array([prior.sample()])
# Get all Bayesian up in here
n_exp = 2
tic = time.time()
for idx_exp in xrange(n_exp):
outcome = model.simulate_experiment(truemp, expparams)
updater.update(outcome, expparams)
toc = time.time() - tic
print "True param: {}".format(truemp)
print "Est. mean: {}".format(updater.est_mean())
print "Est. cov: {}".format(updater.est_covariance_mtx())
print "Error: {}".format(np.sum(np.abs(truemp[0]-updater.est_mean())**2))
print "Trace Cov: {}".format(np.trace(updater.est_covariance_mtx()))
print "Resample count: {}".format(updater.resample_count)
print "Elapsed time: {}".format(toc)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
particles = updater.particle_locations
weights = updater.particle_weights
minweight = min(weights)
maxweight = max(weights)
ax.scatter(particles[:,0],particles[:,1],particles[:,2], s = 100*weights/maxweight)
ax.scatter(truemp[:,0],truemp[:,1],truemp[:,2],c = 'red')
plt.show()
|
Add testing code for tomography module# -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 18:53:24 2012
@author: csferrie
"""
## FEATURES ####################################################################
from __future__ import division
## IMPORTS #####################################################################
import getpass
import sys
sys.path.append({
'csferrie': 'C:/Users/csferrie/Documents/GitHub/python-qinfer/src/',
'cgranade': '/home/cgranade/academics/software-projects/python-qinfer/src/'
}[getpass.getuser()])
import numpy as np
from qinfer import tomography, smc
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import time
if __name__ == "__main__":
N_PARTICLES = 250
# Model and prior initialization
prior = tomography.HaarUniform()
model = tomography.QubitStatePauliModel()
expparams = np.array([1])
# SMC initialization
updater = smc.SMCUpdater(model, N_PARTICLES, prior,resample_a=0.98, resample_thresh=0)
tic = toc = None
# Sample true set of modelparams
truemp = np.array([prior.sample()])
# Get all Bayesian up in here
n_exp = 2
tic = time.time()
for idx_exp in xrange(n_exp):
outcome = model.simulate_experiment(truemp, expparams)
updater.update(outcome, expparams)
toc = time.time() - tic
print "True param: {}".format(truemp)
print "Est. mean: {}".format(updater.est_mean())
print "Est. cov: {}".format(updater.est_covariance_mtx())
print "Error: {}".format(np.sum(np.abs(truemp[0]-updater.est_mean())**2))
print "Trace Cov: {}".format(np.trace(updater.est_covariance_mtx()))
print "Resample count: {}".format(updater.resample_count)
print "Elapsed time: {}".format(toc)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
particles = updater.particle_locations
weights = updater.particle_weights
minweight = min(weights)
maxweight = max(weights)
ax.scatter(particles[:,0],particles[:,1],particles[:,2], s = 100*weights/maxweight)
ax.scatter(truemp[:,0],truemp[:,1],truemp[:,2],c = 'red')
plt.show()
|
<commit_before><commit_msg>Add testing code for tomography module<commit_after># -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 18:53:24 2012
@author: csferrie
"""
## FEATURES ####################################################################
from __future__ import division
## IMPORTS #####################################################################
import getpass
import sys
sys.path.append({
'csferrie': 'C:/Users/csferrie/Documents/GitHub/python-qinfer/src/',
'cgranade': '/home/cgranade/academics/software-projects/python-qinfer/src/'
}[getpass.getuser()])
import numpy as np
from qinfer import tomography, smc
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import time
if __name__ == "__main__":
N_PARTICLES = 250
# Model and prior initialization
prior = tomography.HaarUniform()
model = tomography.QubitStatePauliModel()
expparams = np.array([1])
# SMC initialization
updater = smc.SMCUpdater(model, N_PARTICLES, prior,resample_a=0.98, resample_thresh=0)
tic = toc = None
# Sample true set of modelparams
truemp = np.array([prior.sample()])
# Get all Bayesian up in here
n_exp = 2
tic = time.time()
for idx_exp in xrange(n_exp):
outcome = model.simulate_experiment(truemp, expparams)
updater.update(outcome, expparams)
toc = time.time() - tic
print "True param: {}".format(truemp)
print "Est. mean: {}".format(updater.est_mean())
print "Est. cov: {}".format(updater.est_covariance_mtx())
print "Error: {}".format(np.sum(np.abs(truemp[0]-updater.est_mean())**2))
print "Trace Cov: {}".format(np.trace(updater.est_covariance_mtx()))
print "Resample count: {}".format(updater.resample_count)
print "Elapsed time: {}".format(toc)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
particles = updater.particle_locations
weights = updater.particle_weights
minweight = min(weights)
maxweight = max(weights)
ax.scatter(particles[:,0],particles[:,1],particles[:,2], s = 100*weights/maxweight)
ax.scatter(truemp[:,0],truemp[:,1],truemp[:,2],c = 'red')
plt.show()
|
|
69277650b4f1f118a76a7be74f14a71a74849d0c
|
CodeFights/buildPalindrome.py
|
CodeFights/buildPalindrome.py
|
#!/usr/local/bin/python
# Code Fights Build Palindrome Problem
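# Appends the reversed prefix st[:i+1] to st and returns the first
# (shortest) candidate that reads the same forwards and backwards.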
def buildPalindrome(st):
if st == st[::-1]:
return st
for i in range(len(st)):
s = st + st[i::-1]
if s == s[::-1]:
return s
def main():
tests = [
["abcdc", "abcdcba"],
["ababab", "abababa"],
["abba", "abba"],
["abaa", "abaaba"]
]
for t in tests:
res = buildPalindrome(t[0])
ans = t[1]
if ans == res:
print("PASSED: buildPalindrome({}) returned {}"
.format(t[0], res))
else:
print("FAILED: buildPalindrome({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights build palindrome problem
|
Solve Code Fights build palindrome problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights build palindrome problem
|
#!/usr/local/bin/python
# Code Fights Build Palindrome Problem
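# Appends the reversed prefix st[:i+1] to st and returns the first
# (shortest) candidate that reads the same forwards and backwards.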
def buildPalindrome(st):
if st == st[::-1]:
return st
for i in range(len(st)):
s = st + st[i::-1]
if s == s[::-1]:
return s
def main():
tests = [
["abcdc", "abcdcba"],
["ababab", "abababa"],
["abba", "abba"],
["abaa", "abaaba"]
]
for t in tests:
res = buildPalindrome(t[0])
ans = t[1]
if ans == res:
print("PASSED: buildPalindrome({}) returned {}"
.format(t[0], res))
else:
print("FAILED: buildPalindrome({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights build palindrome problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Build Palindrome Problem
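# Appends the reversed prefix st[:i+1] to st and returns the first
# (shortest) candidate that reads the same forwards and backwards.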
def buildPalindrome(st):
if st == st[::-1]:
return st
for i in range(len(st)):
s = st + st[i::-1]
if s == s[::-1]:
return s
def main():
tests = [
["abcdc", "abcdcba"],
["ababab", "abababa"],
["abba", "abba"],
["abaa", "abaaba"]
]
for t in tests:
res = buildPalindrome(t[0])
ans = t[1]
if ans == res:
print("PASSED: buildPalindrome({}) returned {}"
.format(t[0], res))
else:
print("FAILED: buildPalindrome({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights build palindrome problem#!/usr/local/bin/python
# Code Fights Build Palindrome Problem
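# Appends the reversed prefix st[:i+1] to st and returns the first
# (shortest) candidate that reads the same forwards and backwards.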
def buildPalindrome(st):
if st == st[::-1]:
return st
for i in range(len(st)):
s = st + st[i::-1]
if s == s[::-1]:
return s
def main():
tests = [
["abcdc", "abcdcba"],
["ababab", "abababa"],
["abba", "abba"],
["abaa", "abaaba"]
]
for t in tests:
res = buildPalindrome(t[0])
ans = t[1]
if ans == res:
print("PASSED: buildPalindrome({}) returned {}"
.format(t[0], res))
else:
print("FAILED: buildPalindrome({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights build palindrome problem<commit_after>#!/usr/local/bin/python
# Code Fights Build Palindrome Problem
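# Appends the reversed prefix st[:i+1] to st and returns the first
# (shortest) candidate that reads the same forwards and backwards.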
def buildPalindrome(st):
if st == st[::-1]:
return st
for i in range(len(st)):
s = st + st[i::-1]
if s == s[::-1]:
return s
def main():
tests = [
["abcdc", "abcdcba"],
["ababab", "abababa"],
["abba", "abba"],
["abaa", "abaaba"]
]
for t in tests:
res = buildPalindrome(t[0])
ans = t[1]
if ans == res:
print("PASSED: buildPalindrome({}) returned {}"
.format(t[0], res))
else:
print("FAILED: buildPalindrome({}) returned {}, answer: {}"
.format(t[0], res, ans))
if __name__ == '__main__':
main()
|
|
4e9ec5bd0219d4fdda55113f16471bab5d7e41d7
|
string/bm.py
|
string/bm.py
|
# -*- coding:utf-8 -*-
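# Boyer-Moore substring search: on a mismatch, the pattern is shifted by the
# larger of the bad-character rule (bcmap) and the good-suffix rule
# (suffix/prefix tables).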
def build_bad_char_map(pattern_s, sz=256):
bcmap = [-1] * sz
for i,s in enumerate(pattern_s):
bcmap[ord(s)] = i
return bcmap
def build_good_suffix_prefix_map(pattern_s):
m = len(pattern_s)
prefix, suffix = [False] * m, [-1] * m
for i in range(m - 1):
j = i
k = 0
while j >= 0 and pattern_s[j] == pattern_s[m - k - 1]:
k += 1
suffix[k] = j
j -= 1
if j < 0:
prefix[k] = True
return prefix, suffix
def move_forward_by_gsp(j, m, suffix, prefix):
    # Good-suffix rule: j is the index of the mismatched character.
    k = m - 1 - j
    if suffix[k] != -1:
        return j - suffix[k] + 1
    r = j + 2
    while r < m:
        if prefix[m - r]:
            return r
        r += 1
    # No matching suffix or prefix elsewhere: shift past the whole pattern.
    return m
def bm(string, pattern_s):
m = len(pattern_s)
n = len(string)
step_num = n - m + 1
bcmap = build_bad_char_map(pattern_s)
prefix, suffix = build_good_suffix_prefix_map(pattern_s)
i = 0
while i < step_num:
j = m - 1
while j >= 0 and string[i + j] == pattern_s[j]:
j -= 1
if j < 0:
return i
x = (j - bcmap[ord(string[i + j])])
y = 0
if j < (m - 1):
y = move_forward_by_gsp(j, m, suffix, prefix)
i += max(x, y)
return -1
if __name__ == '__main__':
string = "hello world! today is not my day"
ps = 'today'
print(string)
print(len(string))
print(bm(string, ps))
|
Add BM string search algorithm implementation
|
Add BM string search algorithm implementation
|
Python
|
apache-2.0
|
free-free/algorithm,free-free/algorithm
|
Add BM string search algorithm implementation
|
# -*- coding:utf-8 -*-
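# Boyer-Moore substring search: on a mismatch, the pattern is shifted by the
# larger of the bad-character rule (bcmap) and the good-suffix rule
# (suffix/prefix tables).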
def build_bad_char_map(pattern_s, sz=256):
bcmap = [-1] * sz
for i,s in enumerate(pattern_s):
bcmap[ord(s)] = i
return bcmap
def build_good_suffix_prefix_map(pattern_s):
m = len(pattern_s)
prefix, suffix = [False] * m, [-1] * m
for i in range(m - 1):
j = i
k = 0
while j >= 0 and pattern_s[j] == pattern_s[m - k - 1]:
k += 1
suffix[k] = j
j -= 1
if j < 0:
prefix[k] = True
return prefix, suffix
def move_forward_by_gsp(j, m, suffix, prefix):
    # Good-suffix rule: j is the index of the mismatched character.
    k = m - 1 - j
    if suffix[k] != -1:
        return j - suffix[k] + 1
    r = j + 2
    while r < m:
        if prefix[m - r]:
            return r
        r += 1
    # No matching suffix or prefix elsewhere: shift past the whole pattern.
    return m
def bm(string, pattern_s):
m = len(pattern_s)
n = len(string)
step_num = n - m + 1
bcmap = build_bad_char_map(pattern_s)
prefix, suffix = build_good_suffix_prefix_map(pattern_s)
i = 0
while i < step_num:
j = m - 1
while j >= 0 and string[i + j] == pattern_s[j]:
j -= 1
if j < 0:
return i
x = (j - bcmap[ord(string[i + j])])
y = 0
if j < (m - 1):
y = move_forward_by_gsp(j, m, suffix, prefix)
i += max(x, y)
return -1
if __name__ == '__main__':
string = "hello world! today is not my day"
ps = 'today'
print(string)
print(len(string))
print(bm(string, ps))
|
<commit_before><commit_msg>Add BM string search algorithm implementation<commit_after>
|
# -*- coding:utf-8 -*-
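# Boyer-Moore substring search: on a mismatch, the pattern is shifted by the
# larger of the bad-character rule (bcmap) and the good-suffix rule
# (suffix/prefix tables).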
def build_bad_char_map(pattern_s, sz=256):
bcmap = [-1] * sz
for i,s in enumerate(pattern_s):
bcmap[ord(s)] = i
return bcmap
def build_good_suffix_prefix_map(pattern_s):
m = len(pattern_s)
prefix, suffix = [False] * m, [-1] * m
for i in range(m - 1):
j = i
k = 0
while j >= 0 and pattern_s[j] == pattern_s[m - k - 1]:
k += 1
suffix[k] = j
j -= 1
if j < 0:
prefix[k] = True
return prefix, suffix
def move_forward_by_gsp(j, m, suffix, prefix):
    # Good-suffix rule: j is the index of the mismatched character.
    k = m - 1 - j
    if suffix[k] != -1:
        return j - suffix[k] + 1
    r = j + 2
    while r < m:
        if prefix[m - r]:
            return r
        r += 1
    # No matching suffix or prefix elsewhere: shift past the whole pattern.
    return m
def bm(string, pattern_s):
m = len(pattern_s)
n = len(string)
step_num = n - m + 1
bcmap = build_bad_char_map(pattern_s)
prefix, suffix = build_good_suffix_prefix_map(pattern_s)
i = 0
while i < step_num:
j = m - 1
while j >= 0 and string[i + j] == pattern_s[j]:
j -= 1
if j < 0:
return i
x = (j - bcmap[ord(string[i + j])])
y = 0
if j < (m - 1):
y = move_forward_by_gsp(j, m, suffix, prefix)
i += max(x, y)
return -1
if __name__ == '__main__':
string = "hello world! today is not my day"
ps = 'today'
print(string)
print(len(string))
print(bm(string, ps))
|
Add BM string search algorithm implementation# -*- coding:utf-8 -*-
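# Boyer-Moore substring search: on a mismatch, the pattern is shifted by the
# larger of the bad-character rule (bcmap) and the good-suffix rule
# (suffix/prefix tables).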
def build_bad_char_map(pattern_s, sz=256):
bcmap = [-1] * sz
for i,s in enumerate(pattern_s):
bcmap[ord(s)] = i
return bcmap
def build_good_suffix_prefix_map(pattern_s):
m = len(pattern_s)
prefix, suffix = [False] * m, [-1] * m
for i in range(m - 1):
j = i
k = 0
while j >= 0 and pattern_s[j] == pattern_s[m - k - 1]:
k += 1
suffix[k] = j
j -= 1
if j < 0:
prefix[k] = True
return prefix, suffix
def move_forward_by_gsp(j, m, suffix, prefix):
    # Good-suffix rule: j is the index of the mismatched character.
    k = m - 1 - j
    if suffix[k] != -1:
        return j - suffix[k] + 1
    r = j + 2
    while r < m:
        if prefix[m - r]:
            return r
        r += 1
    # No matching suffix or prefix elsewhere: shift past the whole pattern.
    return m
def bm(string, pattern_s):
m = len(pattern_s)
n = len(string)
step_num = n - m + 1
bcmap = build_bad_char_map(pattern_s)
prefix, suffix = build_good_suffix_prefix_map(pattern_s)
i = 0
while i < step_num:
j = m - 1
while j >= 0 and string[i + j] == pattern_s[j]:
j -= 1
if j < 0:
return i
x = (j - bcmap[ord(string[i + j])])
y = 0
if j < (m - 1):
y = move_forward_by_gsp(j, m, suffix, prefix)
i += max(x, y)
return -1
if __name__ == '__main__':
string = "hello world! today is not my day"
ps = 'today'
print(string)
print(len(string))
print(bm(string, ps))
|
<commit_before><commit_msg>Add BM string search algorithm implementation<commit_after># -*- coding:utf-8 -*-
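# Boyer-Moore substring search: on a mismatch, the pattern is shifted by the
# larger of the bad-character rule (bcmap) and the good-suffix rule
# (suffix/prefix tables).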
def build_bad_char_map(pattern_s, sz=256):
bcmap = [-1] * sz
for i,s in enumerate(pattern_s):
bcmap[ord(s)] = i
return bcmap
def build_good_suffix_prefix_map(pattern_s):
m = len(pattern_s)
prefix, suffix = [False] * m, [-1] * m
for i in range(m - 1):
j = i
k = 0
while j >= 0 and pattern_s[j] == pattern_s[m - k - 1]:
k += 1
suffix[k] = j
j -= 1
if j < 0:
prefix[k] = True
return prefix, suffix
def move_forward_by_gsp(j, m, suffix, prefix):
    # Good-suffix rule: j is the index of the mismatched character.
    k = m - 1 - j
    if suffix[k] != -1:
        return j - suffix[k] + 1
    r = j + 2
    while r < m:
        if prefix[m - r]:
            return r
        r += 1
    # No matching suffix or prefix elsewhere: shift past the whole pattern.
    return m
def bm(string, pattern_s):
m = len(pattern_s)
n = len(string)
step_num = n - m + 1
bcmap = build_bad_char_map(pattern_s)
prefix, suffix = build_good_suffix_prefix_map(pattern_s)
i = 0
while i < step_num:
j = m - 1
while j >= 0 and string[i + j] == pattern_s[j]:
j -= 1
if j < 0:
return i
x = (j - bcmap[ord(string[i + j])])
y = 0
if j < (m - 1):
y = move_forward_by_gsp(j, m, suffix, prefix)
i += max(x, y)
return -1
if __name__ == '__main__':
string = "hello world! today is not my day"
ps = 'today'
print(string)
print(len(string))
print(bm(string, ps))
|
|
cc8e9b9576cf08d7a91508e8f629104f5fef0c54
|
lib/repo/git_hooks/update.d/02-block_change_top_level_folders.py
|
lib/repo/git_hooks/update.d/02-block_change_top_level_folders.py
|
#!/usr/bin/env python3
import subprocess
import sys
if __name__ == '__main__':
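    # A git update hook is invoked with the ref name and the old and new
    # commit ids as its three arguments.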
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout: # there are changes to the top level files/directories
log = subprocess.run(['git', 'log', '--format=format:%ae', '{}...{}'.format(old_commit, new_commit)],
stdout=subprocess.PIPE, universal_newlines=True)
for email in log.stdout.splitlines():
if email != 'markus@markus.com': # user change
# TODO Add signature for markus commits and verify it here
print('Modifying top level files and directories is not allowed!')
exit(1)
|
Add hook to prevent student changes to top level files/dirs
|
git: Add hook to prevent student changes to top level files/dirs
|
Python
|
mit
|
MarkUsProject/Markus,benjaminvialle/Markus,benjaminvialle/Markus,benjaminvialle/Markus,MarkUsProject/Markus,MarkUsProject/Markus,MarkUsProject/Markus,benjaminvialle/Markus,MarkUsProject/Markus,benjaminvialle/Markus,benjaminvialle/Markus,MarkUsProject/Markus,benjaminvialle/Markus,MarkUsProject/Markus,MarkUsProject/Markus
|
git: Add hook to prevent student changes to top level files/dirs
|
#!/usr/bin/env python3
import subprocess
import sys
if __name__ == '__main__':
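    # A git update hook is invoked with the ref name and the old and new
    # commit ids as its three arguments.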
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout: # there are changes to the top level files/directories
log = subprocess.run(['git', 'log', '--format=format:%ae', '{}...{}'.format(old_commit, new_commit)],
stdout=subprocess.PIPE, universal_newlines=True)
for email in log.stdout.splitlines():
if email != 'markus@markus.com': # user change
# TODO Add signature for markus commits and verify it here
print('Modifying top level files and directories is not allowed!')
exit(1)
|
<commit_before><commit_msg>git: Add hook to prevent student changes to top level files/dirs<commit_after>
|
#!/usr/bin/env python3
import subprocess
import sys
if __name__ == '__main__':
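    # A git update hook is invoked with the ref name and the old and new
    # commit ids as its three arguments.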
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout: # there are changes to the top level files/directories
log = subprocess.run(['git', 'log', '--format=format:%ae', '{}...{}'.format(old_commit, new_commit)],
stdout=subprocess.PIPE, universal_newlines=True)
for email in log.stdout.splitlines():
if email != 'markus@markus.com': # user change
# TODO Add signature for markus commits and verify it here
print('Modifying top level files and directories is not allowed!')
exit(1)
|
git: Add hook to prevent student changes to top level files/dirs#!/usr/bin/env python3
import subprocess
import sys
if __name__ == '__main__':
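    # A git update hook is invoked with the ref name and the old and new
    # commit ids as its three arguments.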
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout: # there are changes to the top level files/directories
log = subprocess.run(['git', 'log', '--format=format:%ae', '{}...{}'.format(old_commit, new_commit)],
stdout=subprocess.PIPE, universal_newlines=True)
for email in log.stdout.splitlines():
if email != 'markus@markus.com': # user change
# TODO Add signature for markus commits and verify it here
print('Modifying top level files and directories is not allowed!')
exit(1)
|
<commit_before><commit_msg>git: Add hook to prevent student changes to top level files/dirs<commit_after>#!/usr/bin/env python3
import subprocess
import sys
if __name__ == '__main__':
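    # A git update hook is invoked with the ref name and the old and new
    # commit ids as its three arguments.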
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout: # there are changes to the top level files/directories
log = subprocess.run(['git', 'log', '--format=format:%ae', '{}...{}'.format(old_commit, new_commit)],
stdout=subprocess.PIPE, universal_newlines=True)
for email in log.stdout.splitlines():
if email != 'markus@markus.com': # user change
# TODO Add signature for markus commits and verify it here
print('Modifying top level files and directories is not allowed!')
exit(1)
|
|
1785b211a017b25b2b180a042b58a15ae0ff7d7a
|
neutron/tests/unit/conf/policies/test_network_ip_availability.py
|
neutron/tests/unit/conf/policies/test_network_ip_availability.py
|
# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class NetworkIPAvailabilityAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(NetworkIPAvailabilityAPITestCase, self).setUp()
self.target = {}
class SystemAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_network_ip_availability(self):
self.assertTrue(
policy.enforce(self.context, 'get_network_ip_availability',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemMemberTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
        self.context = self.system_reader_ctx
class ProjectAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.context = self.project_admin_ctx
def test_get_network_ip_availability(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_network_ip_availability', self.target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectMemberTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
Add tests for Network IP availability API's new policy rules
|
Add tests for Network IP availability API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I21a016a6aa58dc78fc67af093933caaf94991fbd
|
Python
|
apache-2.0
|
openstack/neutron,openstack/neutron,mahak/neutron,mahak/neutron,openstack/neutron,mahak/neutron
|
Add tests for Network IP availability API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I21a016a6aa58dc78fc67af093933caaf94991fbd
|
# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class NetworkIPAvailabilityAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(NetworkIPAvailabilityAPITestCase, self).setUp()
self.target = {}
class SystemAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_network_ip_availability(self):
self.assertTrue(
policy.enforce(self.context, 'get_network_ip_availability',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemMemberTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
        self.context = self.system_reader_ctx
class ProjectAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.context = self.project_admin_ctx
def test_get_network_ip_availability(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_network_ip_availability', self.target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectMemberTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
<commit_before><commit_msg>Add tests for Network IP availability API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I21a016a6aa58dc78fc67af093933caaf94991fbd<commit_after>
|
# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class NetworkIPAvailabilityAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(NetworkIPAvailabilityAPITestCase, self).setUp()
self.target = {}
class SystemAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_network_ip_availability(self):
self.assertTrue(
policy.enforce(self.context, 'get_network_ip_availability',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemMemberTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
        self.context = self.system_reader_ctx
class ProjectAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.context = self.project_admin_ctx
def test_get_network_ip_availability(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_network_ip_availability', self.target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectMemberTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
Add tests for Network IP availability API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I21a016a6aa58dc78fc67af093933caaf94991fbd# Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class NetworkIPAvailabilityAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(NetworkIPAvailabilityAPITestCase, self).setUp()
self.target = {}
class SystemAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_network_ip_availability(self):
self.assertTrue(
policy.enforce(self.context, 'get_network_ip_availability',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemMemberTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
        self.context = self.system_reader_ctx
class ProjectAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.context = self.project_admin_ctx
def test_get_network_ip_availability(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_network_ip_availability', self.target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectMemberTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
<commit_before><commit_msg>Add tests for Network IP availability API's new policy rules
Related-blueprint: bp/secure-rbac-roles
Change-Id: I21a016a6aa58dc78fc67af093933caaf94991fbd<commit_after># Copyright (c) 2021 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_policy import policy as base_policy
from neutron import policy
from neutron.tests.unit.conf.policies import base
class NetworkIPAvailabilityAPITestCase(base.PolicyBaseTestCase):
def setUp(self):
super(NetworkIPAvailabilityAPITestCase, self).setUp()
self.target = {}
class SystemAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.context = self.system_admin_ctx
def test_get_network_ip_availability(self):
self.assertTrue(
policy.enforce(self.context, 'get_network_ip_availability',
self.target))
class SystemMemberTests(SystemAdminTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.context = self.system_member_ctx
class SystemReaderTests(SystemMemberTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
        self.context = self.system_reader_ctx
class ProjectAdminTests(NetworkIPAvailabilityAPITestCase):
def setUp(self):
super(ProjectAdminTests, self).setUp()
self.context = self.project_admin_ctx
def test_get_network_ip_availability(self):
self.assertRaises(
base_policy.PolicyNotAuthorized,
policy.enforce,
self.context, 'get_network_ip_availability', self.target)
class ProjectMemberTests(ProjectAdminTests):
def setUp(self):
super(ProjectMemberTests, self).setUp()
self.context = self.project_member_ctx
class ProjectReaderTests(ProjectMemberTests):
def setUp(self):
super(ProjectReaderTests, self).setUp()
self.context = self.project_reader_ctx
|
|
914cb7d5eac4598474c3473568ec9705f5485dae
|
split-training.py
|
split-training.py
|
import os
import random
import shutil
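# Copies each labelled image class into train/validation/test directories
# according to the percentages below.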
train_split_percent, validation_split_percent, test_split_percent = 70, 20, 10
dataset_dir = '/Users/tom/projects-workspace/set-game/data/train-v2/labelled'
target_dir = '/Users/tom/tmp/set'
dirs = []
for (dirpath, dirnames, filenames) in os.walk(dataset_dir):
dirs.extend(dirnames)
break
os.mkdir(target_dir)
target_train_dir = os.path.join(target_dir, 'train')
target_validation_dir = os.path.join(target_dir, 'validation')
target_test_dir = os.path.join(target_dir, 'test')
os.mkdir(target_train_dir)
os.mkdir(target_validation_dir)
os.mkdir(target_test_dir)
colours = ('green', 'purple', 'red')
for label in colours:
os.mkdir(os.path.join(target_dir, 'train', label))
os.mkdir(os.path.join(target_dir, 'validation', label))
os.mkdir(os.path.join(target_dir, 'test', label))
for dir in dirs:
files = os.listdir(os.path.join(dataset_dir, dir))
random.shuffle(files)
i1 = int(len(files) * train_split_percent / 100)
i2 = int(len(files) * (train_split_percent + validation_split_percent) / 100)
train, validation, test = files[:i1], files[i1:i2], files[i2:]
label = dir.split('-')[1] # colour
for file in train:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_train_dir, label, file))
for file in validation:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_validation_dir, label, file))
for file in test:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_test_dir, label, file))
|
Add script to split data into train/validation/test datasets
|
Add script to split data into train/validation/test datasets
|
Python
|
apache-2.0
|
tomwhite/set-game,tomwhite/set-game,tomwhite/set-game
|
Add script to split data into train/validation/test datasets
|
import os
import random
import shutil
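# Copies each labelled image class into train/validation/test directories
# according to the percentages below.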
train_split_percent, validation_split_percent, test_split_percent = 70, 20, 10
dataset_dir = '/Users/tom/projects-workspace/set-game/data/train-v2/labelled'
target_dir = '/Users/tom/tmp/set'
dirs = []
for (dirpath, dirnames, filenames) in os.walk(dataset_dir):
dirs.extend(dirnames)
break
os.mkdir(target_dir)
target_train_dir = os.path.join(target_dir, 'train')
target_validation_dir = os.path.join(target_dir, 'validation')
target_test_dir = os.path.join(target_dir, 'test')
os.mkdir(target_train_dir)
os.mkdir(target_validation_dir)
os.mkdir(target_test_dir)
colours = ('green', 'purple', 'red')
for label in colours:
os.mkdir(os.path.join(target_dir, 'train', label))
os.mkdir(os.path.join(target_dir, 'validation', label))
os.mkdir(os.path.join(target_dir, 'test', label))
for dir in dirs:
files = os.listdir(os.path.join(dataset_dir, dir))
random.shuffle(files)
i1 = int(len(files) * train_split_percent / 100)
i2 = int(len(files) * (train_split_percent + validation_split_percent) / 100)
train, validation, test = files[:i1], files[i1:i2], files[i2:]
label = dir.split('-')[1] # colour
for file in train:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_train_dir, label, file))
for file in validation:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_validation_dir, label, file))
for file in test:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_test_dir, label, file))
|
<commit_before><commit_msg>Add script to split data into train/validation/test datasets<commit_after>
|
import os
import random
import shutil
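# Copies each labelled image class into train/validation/test directories
# according to the percentages below.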
train_split_percent, validation_split_percent, test_split_percent = 70, 20, 10
dataset_dir = '/Users/tom/projects-workspace/set-game/data/train-v2/labelled'
target_dir = '/Users/tom/tmp/set'
dirs = []
for (dirpath, dirnames, filenames) in os.walk(dataset_dir):
dirs.extend(dirnames)
break
os.mkdir(target_dir)
target_train_dir = os.path.join(target_dir, 'train')
target_validation_dir = os.path.join(target_dir, 'validation')
target_test_dir = os.path.join(target_dir, 'test')
os.mkdir(target_train_dir)
os.mkdir(target_validation_dir)
os.mkdir(target_test_dir)
colours = ('green', 'purple', 'red')
for label in colours:
os.mkdir(os.path.join(target_dir, 'train', label))
os.mkdir(os.path.join(target_dir, 'validation', label))
os.mkdir(os.path.join(target_dir, 'test', label))
for dir in dirs:
files = os.listdir(os.path.join(dataset_dir, dir))
random.shuffle(files)
i1 = int(len(files) * train_split_percent / 100)
i2 = int(len(files) * (train_split_percent + validation_split_percent) / 100)
train, validation, test = files[:i1], files[i1:i2], files[i2:]
label = dir.split('-')[1] # colour
for file in train:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_train_dir, label, file))
for file in validation:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_validation_dir, label, file))
for file in test:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_test_dir, label, file))
|
Add script to split data into train/validation/test datasetsimport os
import random
import shutil
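# Copies each labelled image class into train/validation/test directories
# according to the percentages below.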
train_split_percent, validation_split_percent, test_split_percent = 70, 20, 10
dataset_dir = '/Users/tom/projects-workspace/set-game/data/train-v2/labelled'
target_dir = '/Users/tom/tmp/set'
dirs = []
for (dirpath, dirnames, filenames) in os.walk(dataset_dir):
dirs.extend(dirnames)
break
os.mkdir(target_dir)
target_train_dir = os.path.join(target_dir, 'train')
target_validation_dir = os.path.join(target_dir, 'validation')
target_test_dir = os.path.join(target_dir, 'test')
os.mkdir(target_train_dir)
os.mkdir(target_validation_dir)
os.mkdir(target_test_dir)
colours = ('green', 'purple', 'red')
for label in colours:
os.mkdir(os.path.join(target_dir, 'train', label))
os.mkdir(os.path.join(target_dir, 'validation', label))
os.mkdir(os.path.join(target_dir, 'test', label))
for dir in dirs:
files = os.listdir(os.path.join(dataset_dir, dir))
random.shuffle(files)
i1 = int(len(files) * train_split_percent / 100)
i2 = int(len(files) * (train_split_percent + validation_split_percent) / 100)
train, validation, test = files[:i1], files[i1:i2], files[i2:]
label = dir.split('-')[1] # colour
for file in train:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_train_dir, label, file))
for file in validation:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_validation_dir, label, file))
for file in test:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_test_dir, label, file))
|
<commit_before><commit_msg>Add script to split data into train/validation/test datasets<commit_after>import os
import random
import shutil
train_split_percent, validation_split_percent, test_split_percentage = 70, 20, 10
dataset_dir = '/Users/tom/projects-workspace/set-game/data/train-v2/labelled'
target_dir = '/Users/tom/tmp/set'
dirs = []
for (dirpath, dirnames, filenames) in os.walk(dataset_dir):
dirs.extend(dirnames)
break
os.mkdir(target_dir)
target_train_dir = os.path.join(target_dir, 'train')
target_validation_dir = os.path.join(target_dir, 'validation')
target_test_dir = os.path.join(target_dir, 'test')
os.mkdir(target_train_dir)
os.mkdir(target_validation_dir)
os.mkdir(target_test_dir)
colours = ('green', 'purple', 'red')
for label in colours:
os.mkdir(os.path.join(target_dir, 'train', label))
os.mkdir(os.path.join(target_dir, 'validation', label))
os.mkdir(os.path.join(target_dir, 'test', label))
for dir in dirs:
files = os.listdir(os.path.join(dataset_dir, dir))
random.shuffle(files)
i1 = int(len(files) * train_split_percent / 100)
i2 = int(len(files) * (train_split_percent + validation_split_percent) / 100)
train, validation, test = files[:i1], files[i1:i2], files[i2:]
label = dir.split('-')[1] # colour
for file in train:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_train_dir, label, file))
for file in validation:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_validation_dir, label, file))
for file in test:
shutil.copyfile(os.path.join(dataset_dir, dir, file), os.path.join(target_test_dir, label, file))
|
|
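A side note on the split arithmetic above: the cut points i1 and i2 hand-roll a 70/20/10 partition. For reference, a minimal sketch of the same split using scikit-learn's train_test_split — assuming scikit-learn is available; the directory handling and label parsing stay exactly as in the script:

from sklearn.model_selection import train_test_split

def split_files(files, seed=None):
    # 70% train first, then split the remaining 30% in a 2:1 ratio,
    # giving 20% validation and 10% test overall.
    train, rest = train_test_split(files, train_size=0.7, random_state=seed)
    validation, test = train_test_split(rest, train_size=2 / 3.0, random_state=seed)
    return train, validation, test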
152f9a2410bc3714ca8c9395295ff67f30ebd548
|
test_SR1d.py
|
test_SR1d.py
|
import eos_defns
import SR1d
from numpy.testing import assert_allclose
def test_standard_sod():
"""
Relativistic Sod test.
Numbers are taken from the General Matlab code, so accuracy isn't perfect.
"""
eos = eos_defns.eos_gamma_law(5.0/3.0)
w_left = SR1d.State(1.0, 0.0, 1.5, eos, label="L")
w_right = SR1d.State(0.125, 0.0, 1.2, eos, label="R")
rp = SR1d.RP(w_left, w_right)
p_star_matlab = 0.308909954203586
assert_allclose(rp.p_star, p_star_matlab, rtol=1e-6)
rarefaction_speeds_matlab = [-0.690065559342354, -0.277995552140227]
assert_allclose(rp.waves[0].wave_speed, rarefaction_speeds_matlab, rtol=1e-6)
shock_speed_matlab = 0.818591417744604
assert_allclose(rp.waves[2].wave_speed, shock_speed_matlab, rtol=1e-6)
|
Add the start of a test of the RP class.
|
Add the start of a test of the RP class.
|
Python
|
mit
|
harpolea/r3d2
|
Add the start of a test of the RP class.
|
import eos_defns
import SR1d
from numpy.testing import assert_allclose
def test_standard_sod():
"""
Relativistic Sod test.
Numbers are taken from the General Matlab code, so accuracy isn't perfect.
"""
eos = eos_defns.eos_gamma_law(5.0/3.0)
w_left = SR1d.State(1.0, 0.0, 1.5, eos, label="L")
w_right = SR1d.State(0.125, 0.0, 1.2, eos, label="R")
rp = SR1d.RP(w_left, w_right)
p_star_matlab = 0.308909954203586
assert_allclose(rp.p_star, p_star_matlab, rtol=1e-6)
rarefaction_speeds_matlab = [-0.690065559342354, -0.277995552140227]
assert_allclose(rp.waves[0].wave_speed, rarefaction_speeds_matlab, rtol=1e-6)
shock_speed_matlab = 0.818591417744604
assert_allclose(rp.waves[2].wave_speed, shock_speed_matlab, rtol=1e-6)
|
<commit_before><commit_msg>Add the start of a test of the RP class.<commit_after>
|
import eos_defns
import SR1d
from numpy.testing import assert_allclose
def test_standard_sod():
"""
Relativistic Sod test.
Numbers are taken from the General Matlab code, so accuracy isn't perfect.
"""
eos = eos_defns.eos_gamma_law(5.0/3.0)
w_left = SR1d.State(1.0, 0.0, 1.5, eos, label="L")
w_right = SR1d.State(0.125, 0.0, 1.2, eos, label="R")
rp = SR1d.RP(w_left, w_right)
p_star_matlab = 0.308909954203586
assert_allclose(rp.p_star, p_star_matlab, rtol=1e-6)
rarefaction_speeds_matlab = [-0.690065559342354, -0.277995552140227]
assert_allclose(rp.waves[0].wave_speed, rarefaction_speeds_matlab, rtol=1e-6)
shock_speed_matlab = 0.818591417744604
assert_allclose(rp.waves[2].wave_speed, shock_speed_matlab, rtol=1e-6)
|
Add the start of a test of the RP class.import eos_defns
import SR1d
from numpy.testing import assert_allclose
def test_standard_sod():
"""
Relativistic Sod test.
Numbers are taken from the General Matlab code, so accuracy isn't perfect.
"""
eos = eos_defns.eos_gamma_law(5.0/3.0)
w_left = SR1d.State(1.0, 0.0, 1.5, eos, label="L")
w_right = SR1d.State(0.125, 0.0, 1.2, eos, label="R")
rp = SR1d.RP(w_left, w_right)
p_star_matlab = 0.308909954203586
assert_allclose(rp.p_star, p_star_matlab, rtol=1e-6)
rarefaction_speeds_matlab = [-0.690065559342354, -0.277995552140227]
assert_allclose(rp.waves[0].wave_speed, rarefaction_speeds_matlab, rtol=1e-6)
shock_speed_matlab = 0.818591417744604
assert_allclose(rp.waves[2].wave_speed, shock_speed_matlab, rtol=1e-6)
|
<commit_before><commit_msg>Add the start of a test of the RP class.<commit_after>import eos_defns
import SR1d
from numpy.testing import assert_allclose
def test_standard_sod():
"""
Relativistic Sod test.
Numbers are taken from the General Matlab code, so accuracy isn't perfect.
"""
eos = eos_defns.eos_gamma_law(5.0/3.0)
w_left = SR1d.State(1.0, 0.0, 1.5, eos, label="L")
w_right = SR1d.State(0.125, 0.0, 1.2, eos, label="R")
rp = SR1d.RP(w_left, w_right)
p_star_matlab = 0.308909954203586
assert_allclose(rp.p_star, p_star_matlab, rtol=1e-6)
rarefaction_speeds_matlab = [-0.690065559342354, -0.277995552140227]
assert_allclose(rp.waves[0].wave_speed, rarefaction_speeds_matlab, rtol=1e-6)
shock_speed_matlab = 0.818591417744604
assert_allclose(rp.waves[2].wave_speed, shock_speed_matlab, rtol=1e-6)
|
|
5244c06403aa82c920b742cf22e60adc34c72295
|
Scripts/get-my-ip.py
|
Scripts/get-my-ip.py
|
#!/usr/bin/env python
"""Get your public IP address from a UDP socket connection
"""
import socket as _socket
def get_my_ip(host, port=80):
s = _socket.socket(_socket.AF_INET, _socket.SOCK_DGRAM)
try:
s.connect((host, port))
return s.getsockname()[0]
finally:
s.close()
if __name__ == '__main__':
import argparse as _argparse
parser = _argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'host', default='software-carpentry.org', nargs='?')
parser.add_argument(
'port', default=80, type=int, nargs='?')
args = parser.parse_args()
print(get_my_ip(host=args.host, port=args.port))
|
Add small script to get public ip
|
Add small script to get public ip
|
Python
|
bsd-3-clause
|
eddiejessup/ciabatta
|
Add small script to get public ip
|
#!/usr/bin/env python
"""Get your public IP address from a UDP socket connection
"""
import socket as _socket
def get_my_ip(host, port=80):
s = _socket.socket(_socket.AF_INET, _socket.SOCK_DGRAM)
try:
s.connect((host, port))
return s.getsockname()[0]
finally:
s.close()
if __name__ == '__main__':
import argparse as _argparse
parser = _argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'host', default='software-carpentry.org', nargs='?')
parser.add_argument(
'port', default=80, type=int, nargs='?')
args = parser.parse_args()
print(get_my_ip(host=args.host, port=args.port))
|
<commit_before><commit_msg>Add small script to get public ip<commit_after>
|
#!/usr/bin/env python
"""Get your public IP address from a UDP socket connection
"""
import socket as _socket
def get_my_ip(host, port=80):
s = _socket.socket(_socket.AF_INET, _socket.SOCK_DGRAM)
try:
s.connect((host, port))
return s.getsockname()[0]
finally:
s.close()
if __name__ == '__main__':
import argparse as _argparse
parser = _argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'host', default='software-carpentry.org', nargs='?')
parser.add_argument(
'port', default=80, type=int, nargs='?')
args = parser.parse_args()
print(get_my_ip(host=args.host, port=args.port))
|
Add small script to get public ip#!/usr/bin/env python
"""Get your public IP address from a UDP socket connection
"""
import socket as _socket
def get_my_ip(host, port=80):
s = _socket.socket(_socket.AF_INET, _socket.SOCK_DGRAM)
try:
s.connect((host, port))
return s.getsockname()[0]
finally:
s.close()
if __name__ == '__main__':
import argparse as _argparse
parser = _argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'host', default='software-carpentry.org', nargs='?')
parser.add_argument(
'port', default=80, type=int, nargs='?')
args = parser.parse_args()
print(get_my_ip(host=args.host, port=args.port))
|
<commit_before><commit_msg>Add small script to get public ip<commit_after>#!/usr/bin/env python
"""Get your public IP address from a UDP socket connection
"""
import socket as _socket
def get_my_ip(host, port=80):
s = _socket.socket(_socket.AF_INET, _socket.SOCK_DGRAM)
try:
s.connect((host, port))
return s.getsockname()[0]
finally:
s.close()
if __name__ == '__main__':
import argparse as _argparse
parser = _argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'host', default='software-carpentry.org', nargs='?')
parser.add_argument(
'port', default=80, type=int, nargs='?')
args = parser.parse_args()
print(get_my_ip(host=args.host, port=args.port))
|
|
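One caveat worth flagging about the trick above: connect() on a SOCK_DGRAM socket sends no packets — it only asks the OS which local interface would route toward the host — so behind NAT the script reports a private LAN address, not a public one. A hedged sketch of a true public-IP lookup, assuming an external echo service such as api.ipify.org is acceptable (the endpoint is illustrative, not part of the original script):

try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2

def get_public_ip(url='https://api.ipify.org'):
    # The service simply echoes back the caller's public address.
    return urlopen(url).read().decode('ascii').strip()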
b8572aa26114bd9792b83dfb9187e466537303fa
|
stickord/commands/games.py
|
stickord/commands/games.py
|
import discord
from stickord.registry import Command
@Command(['tellen', 'tel'])
async def counting(cont, mesg, client, *args, **kwargs):
count, auth, when = (0, discord.User, None)
if cont:
try:
num = int(cont[0])
if num != count + 1:
                return f'Whoops, you done goof! you should have entered "{count+1}" but you entered "{num}"'
            elif mesg.author.id == auth.id:
return f'You can\'t submit a number twice in a row! Shame on you {mesg.author.mention}!'
else:
#save num
await client.add_reaction(mesg, '\U0001f44c')
except ValueError:
return 'Entered number was not valid.'
|
Add framework for counting command
|
Add framework for counting command
|
Python
|
mit
|
RobinSikkens/Sticky-discord
|
Add framework for counting command
|
import discord
from stickord.registry import Command
@Command(['tellen', 'tel'])
async def counting(cont, mesg, client, *args, **kwargs):
count, auth, when = (0, discord.User, None)
if cont:
try:
num = int(cont[0])
if num != count + 1:
                return f'Whoops, you done goof! you should have entered "{count+1}" but you entered "{num}"'
            elif mesg.author.id == auth.id:
return f'You can\'t submit a number twice in a row! Shame on you {mesg.author.mention}!'
else:
#save num
await client.add_reaction(mesg, '\U0001f44c')
except ValueError:
return 'Entered number was not valid.'
|
<commit_before><commit_msg>Add framework for counting command<commit_after>
|
import discord
from stickord.registry import Command
@Command(['tellen', 'tel'])
async def counting(cont, mesg, client, *args, **kwargs):
count, auth, when = (0, discord.User, None)
if cont:
try:
num = int(cont[0])
if num != count + 1:
                return f'Whoops, you done goof! you should have entered "{count+1}" but you entered "{num}"'
            elif mesg.author.id == auth.id:
return f'You can\'t submit a number twice in a row! Shame on you {mesg.author.mention}!'
else:
#save num
await client.add_reaction(mesg, '\U0001f44c')
except ValueError:
return 'Entered number was not valid.'
|
Add framework for counting commandimport discord
from stickord.registry import Command
@Command(['tellen', 'tel'])
async def counting(cont, mesg, client, *args, **kwargs):
count, auth, when = (0, discord.User, None)
if cont:
try:
num = int(cont[0])
if num != count + 1:
                return f'Whoops, you done goof! you should have entered "{count+1}" but you entered "{num}"'
            elif mesg.author.id == auth.id:
return f'You can\'t submit a number twice in a row! Shame on you {mesg.author.mention}!'
else:
#save num
await client.add_reaction(mesg, '\U0001f44c')
except ValueError:
return 'Entered number was not valid.'
|
<commit_before><commit_msg>Add framework for counting command<commit_after>import discord
from stickord.registry import Command
@Command(['tellen', 'tel'])
async def counting(cont, mesg, client, *args, **kwargs):
count, auth, when = (0, discord.User, None)
if cont:
try:
num = int(cont[0])
if num != count + 1:
                return f'Whoops, you done goof! you should have entered "{count+1}" but you entered "{num}"'
            elif mesg.author.id == auth.id:
return f'You can\'t submit a number twice in a row! Shame on you {mesg.author.mention}!'
else:
#save num
await client.add_reaction(mesg, '\U0001f44c')
except ValueError:
return 'Entered number was not valid.'
|
|
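The command above is explicitly a skeleton: count, auth and when reset on every invocation, and nothing happens where the '#save num' placeholder sits. A minimal in-memory sketch of the missing state handling, keyed per channel — purely illustrative, since the real bot would presumably persist this elsewhere:

# Hypothetical in-memory store: channel id -> (current count, last author id).
_counting_state = {}

def advance_count(channel_id, author_id, num):
    count, last_author = _counting_state.get(channel_id, (0, None))
    if num != count + 1 or author_id == last_author:
        return False  # wrong number, or the same author twice in a row
    _counting_state[channel_id] = (num, author_id)
    return True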
d18b86de344ea720451feece2ebbaee8b0eb3d1e
|
scripts/download_archive.py
|
scripts/download_archive.py
|
"""A script to download slack archives."""
from os.path import abspath, dirname, join
from splinter import Browser
from splinter.exceptions import ElementDoesNotExist
import yaml
HERE = dirname(abspath(__file__))
CONFIG_PATH = join(HERE, '..', 'parktain', 'config.yaml')
with open(CONFIG_PATH, 'r') as ymlfile:
slack = yaml.load(ymlfile).get('slack')
def wait_for_download_completion(browser):
browser.visit("chrome://downloads/")
    # FIXME: Figure out what element needs to disappear/appear
import time
time.sleep(30)
with Browser('chrome') as browser:
# Visit URL
url = 'https://my.slack.com/services/export'
browser.visit(url)
browser.fill('domain', slack['domain'])
browser.click_link_by_id('submit_team_domain')
browser.fill('email', slack['email'])
browser.fill('password', slack['password'])
browser.click_link_by_id('signin_btn')
try:
button = browser.find_by_text('Start Export')[0]
button.click()
except ElementDoesNotExist:
pass
try:
link = browser.find_link_by_partial_text('Ready for download')[0]
link.click()
wait_for_download_completion(browser)
except ElementDoesNotExist:
print('Could not download export file')
|
Add script to download slack archives
|
Add script to download slack archives
|
Python
|
bsd-3-clause
|
punchagan/parktain,punchagan/parktain,punchagan/parktain
|
Add script to download slack archives
|
"""A script to download slack archives."""
from os.path import abspath, dirname, join
from splinter import Browser
from splinter.exceptions import ElementDoesNotExist
import yaml
HERE = dirname(abspath(__file__))
CONFIG_PATH = join(HERE, '..', 'parktain', 'config.yaml')
with open(CONFIG_PATH, 'r') as ymlfile:
slack = yaml.load(ymlfile).get('slack')
def wait_for_download_completion(browser):
browser.visit("chrome://downloads/")
    # FIXME: Figure out what element needs to disappear/appear
import time
time.sleep(30)
with Browser('chrome') as browser:
# Visit URL
url = 'https://my.slack.com/services/export'
browser.visit(url)
browser.fill('domain', slack['domain'])
browser.click_link_by_id('submit_team_domain')
browser.fill('email', slack['email'])
browser.fill('password', slack['password'])
browser.click_link_by_id('signin_btn')
try:
button = browser.find_by_text('Start Export')[0]
button.click()
except ElementDoesNotExist:
pass
try:
link = browser.find_link_by_partial_text('Ready for download')[0]
link.click()
wait_for_download_completion(browser)
except ElementDoesNotExist:
print('Could not download export file')
|
<commit_before><commit_msg>Add script to download slack archives<commit_after>
|
"""A script to download slack archives."""
from os.path import abspath, dirname, join
from splinter import Browser
from splinter.exceptions import ElementDoesNotExist
import yaml
HERE = dirname(abspath(__file__))
CONFIG_PATH = join(HERE, '..', 'parktain', 'config.yaml')
with open(CONFIG_PATH, 'r') as ymlfile:
slack = yaml.load(ymlfile).get('slack')
def wait_for_download_completion(browser):
browser.visit("chrome://downloads/")
    # FIXME: Figure out what element needs to disappear/appear
import time
time.sleep(30)
with Browser('chrome') as browser:
# Visit URL
url = 'https://my.slack.com/services/export'
browser.visit(url)
browser.fill('domain', slack['domain'])
browser.click_link_by_id('submit_team_domain')
browser.fill('email', slack['email'])
browser.fill('password', slack['password'])
browser.click_link_by_id('signin_btn')
try:
button = browser.find_by_text('Start Export')[0]
button.click()
except ElementDoesNotExist:
pass
try:
link = browser.find_link_by_partial_text('Ready for download')[0]
link.click()
wait_for_download_completion(browser)
except ElementDoesNotExist:
print('Could not download export file')
|
Add script to download slack archives"""A script to download slack archives."""
from os.path import abspath, dirname, join
from splinter import Browser
from splinter.exceptions import ElementDoesNotExist
import yaml
HERE = dirname(abspath(__file__))
CONFIG_PATH = join(HERE, '..', 'parktain', 'config.yaml')
with open(CONFIG_PATH, 'r') as ymlfile:
slack = yaml.load(ymlfile).get('slack')
def wait_for_download_completion(browser):
browser.visit("chrome://downloads/")
    # FIXME: Figure out what element needs to disappear/appear
import time
time.sleep(30)
with Browser('chrome') as browser:
# Visit URL
url = 'https://my.slack.com/services/export'
browser.visit(url)
browser.fill('domain', slack['domain'])
browser.click_link_by_id('submit_team_domain')
browser.fill('email', slack['email'])
browser.fill('password', slack['password'])
browser.click_link_by_id('signin_btn')
try:
button = browser.find_by_text('Start Export')[0]
button.click()
except ElementDoesNotExist:
pass
try:
link = browser.find_link_by_partial_text('Ready for download')[0]
link.click()
wait_for_download_completion(browser)
except ElementDoesNotExist:
print('Could not download export file')
|
<commit_before><commit_msg>Add script to download slack archives<commit_after>"""A script to download slack archives."""
from os.path import abspath, dirname, join
from splinter import Browser
from splinter.exceptions import ElementDoesNotExist
import yaml
HERE = dirname(abspath(__file__))
CONFIG_PATH = join(HERE, '..', 'parktain', 'config.yaml')
with open(CONFIG_PATH, 'r') as ymlfile:
slack = yaml.load(ymlfile).get('slack')
def wait_for_download_completion(browser):
browser.visit("chrome://downloads/")
    # FIXME: Figure out what element needs to disappear/appear
import time
time.sleep(30)
with Browser('chrome') as browser:
# Visit URL
url = 'https://my.slack.com/services/export'
browser.visit(url)
browser.fill('domain', slack['domain'])
browser.click_link_by_id('submit_team_domain')
browser.fill('email', slack['email'])
browser.fill('password', slack['password'])
browser.click_link_by_id('signin_btn')
try:
button = browser.find_by_text('Start Export')[0]
button.click()
except ElementDoesNotExist:
pass
try:
link = browser.find_link_by_partial_text('Ready for download')[0]
link.click()
wait_for_download_completion(browser)
except ElementDoesNotExist:
print('Could not download export file')
|
|
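Regarding the FIXME in wait_for_download_completion: rather than scraping Chrome's internal downloads page, one pragmatic workaround is to poll the download directory until Chrome's partial-file marker disappears. A sketch assuming the browser's download directory is known — the path, timeout and poll interval are all illustrative:

import glob
import os.path
import time

def wait_for_download(download_dir, timeout=120, poll=1):
    # Chrome writes in-progress downloads as *.crdownload files;
    # the download is complete once none remain.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if not glob.glob(os.path.join(download_dir, '*.crdownload')):
            return True
        time.sleep(poll)
    return False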
3545caa05b5ef9a123b438884c2a32989fa47319
|
plugins/BidirectionalSynapseCleanup.py
|
plugins/BidirectionalSynapseCleanup.py
|
from tulip import *
import tulipplugins
import tulippaths as tp
from FileOutputPlugin import FileOutputPlugin
class BidirectionalSynapseCleanup(FileOutputPlugin):
def __init__(self, context):
FileOutputPlugin.__init__(self, context)
self._edgeTypeLabel = "Edge type"
self.addStringParameter(self._edgeTypeLabel, "Find edges of this type that are missing a directed link.",
"Gap Junction")
def check(self):
return (True, "")
def findInverseEdge(self, graph, source, target, links):
linkedStructures = graph.getStringProperty("LinkedStructures")
for edge in graph.getInOutEdges(target):
if tp.utils.getEdgeType(edge, graph) == self.dataSet[self._edgeTypeLabel]:
edgeSource = graph.source(edge)
edgeTarget = graph.target(edge)
edgeLinks = linkedStructures[edge]
if edgeSource == target and edgeTarget == source and links == edgeLinks:
return True
return False
def run(self):
graph = self.graph
viewSelection = self.graph.getBooleanProperty("viewSelection")
linkedStructures = graph.getStringProperty("LinkedStructures")
self.beginFileOutput()
# Loop over all edges. If edge is a gap junction, then search for a gap junction going the other direction.
for edge in graph.getEdges():
edgeType = tp.utils.getEdgeType(edge, graph)
if edgeType == self.dataSet[self._edgeTypeLabel]:
source = graph.source(edge)
target = graph.target(edge)
links = linkedStructures[edge]
if not self.findInverseEdge(graph, source, target, links):
output = str(tp.utils.getNodeId(source, graph)) + "-" + tp.utils.getNodeType(source,
graph) + "; " + str(
tp.utils.getNodeId(target, graph)) + "-" + tp.utils.getNodeType(target,
graph) + "; " + links
viewSelection[edge] = True
self.printToFile(output)
self.endFileOutput()
return True
# The line below registers the plugin into the plugin database and updates the GUI to make it accessible through the menus.
tulipplugins.registerPlugin("BidirectionalSynapseCleanup", "Find Missing Bidirectional Synapses", "Kerzner",
"12/01/2017", "", "1.0")
|
Add plugin for finding bidirectional synapses missing edges.
|
Add plugin for finding bidirectional synapses missing edges.
|
Python
|
mit
|
visdesignlab/TulipPaths,visdesignlab/TulipPaths
|
Add plugin for finding bidirectional synapses missing edges.
|
from tulip import *
import tulipplugins
import tulippaths as tp
from FileOutputPlugin import FileOutputPlugin
class BidirectionalSynapseCleanup(FileOutputPlugin):
def __init__(self, context):
FileOutputPlugin.__init__(self, context)
self._edgeTypeLabel = "Edge type"
self.addStringParameter(self._edgeTypeLabel, "Find edges of this type that are missing a directed link.",
"Gap Junction")
def check(self):
return (True, "")
def findInverseEdge(self, graph, source, target, links):
linkedStructures = graph.getStringProperty("LinkedStructures")
for edge in graph.getInOutEdges(target):
if tp.utils.getEdgeType(edge, graph) == self.dataSet[self._edgeTypeLabel]:
edgeSource = graph.source(edge)
edgeTarget = graph.target(edge)
edgeLinks = linkedStructures[edge]
if edgeSource == target and edgeTarget == source and links == edgeLinks:
return True
return False
def run(self):
graph = self.graph
viewSelection = self.graph.getBooleanProperty("viewSelection")
linkedStructures = graph.getStringProperty("LinkedStructures")
self.beginFileOutput()
# Loop over all edges. If edge is a gap junction, then search for a gap junction going the other direction.
for edge in graph.getEdges():
edgeType = tp.utils.getEdgeType(edge, graph)
if edgeType == self.dataSet[self._edgeTypeLabel]:
source = graph.source(edge)
target = graph.target(edge)
links = linkedStructures[edge]
if not self.findInverseEdge(graph, source, target, links):
output = str(tp.utils.getNodeId(source, graph)) + "-" + tp.utils.getNodeType(source,
graph) + "; " + str(
tp.utils.getNodeId(target, graph)) + "-" + tp.utils.getNodeType(target,
graph) + "; " + links
viewSelection[edge] = True
self.printToFile(output)
self.endFileOutput()
return True
# The line below registers the plugin into the plugin database and updates the GUI to make it accessible through the menus.
tulipplugins.registerPlugin("BidirectionalSynapseCleanup", "Find Missing Bidirectional Synapses", "Kerzner",
"12/01/2017", "", "1.0")
|
<commit_before><commit_msg>Add plugin for finding bidirectional synapses missing edges.<commit_after>
|
from tulip import *
import tulipplugins
import tulippaths as tp
from FileOutputPlugin import FileOutputPlugin
class BidirectionalSynapseCleanup(FileOutputPlugin):
def __init__(self, context):
FileOutputPlugin.__init__(self, context)
self._edgeTypeLabel = "Edge type"
self.addStringParameter(self._edgeTypeLabel, "Find edges of this type that are missing a directed link.",
"Gap Junction")
def check(self):
return (True, "")
def findInverseEdge(self, graph, source, target, links):
linkedStructures = graph.getStringProperty("LinkedStructures")
for edge in graph.getInOutEdges(target):
if tp.utils.getEdgeType(edge, graph) == self.dataSet[self._edgeTypeLabel]:
edgeSource = graph.source(edge)
edgeTarget = graph.target(edge)
edgeLinks = linkedStructures[edge]
if edgeSource == target and edgeTarget == source and links == edgeLinks:
return True
return False
def run(self):
graph = self.graph
viewSelection = self.graph.getBooleanProperty("viewSelection")
linkedStructures = graph.getStringProperty("LinkedStructures")
self.beginFileOutput()
# Loop over all edges. If edge is a gap junction, then search for a gap junction going the other direction.
for edge in graph.getEdges():
edgeType = tp.utils.getEdgeType(edge, graph)
if edgeType == self.dataSet[self._edgeTypeLabel]:
source = graph.source(edge)
target = graph.target(edge)
links = linkedStructures[edge]
if not self.findInverseEdge(graph, source, target, links):
output = str(tp.utils.getNodeId(source, graph)) + "-" + tp.utils.getNodeType(source,
graph) + "; " + str(
tp.utils.getNodeId(target, graph)) + "-" + tp.utils.getNodeType(target,
graph) + "; " + links
viewSelection[edge] = True
self.printToFile(output)
self.endFileOutput()
return True
# The line below registers the plugin into the plugin database and updates the GUI to make it accessible through the menus.
tulipplugins.registerPlugin("BidirectionalSynapseCleanup", "Find Missing Bidirectional Synapses", "Kerzner",
"12/01/2017", "", "1.0")
|
Add plugin for finding bidirectional synapses missing edges.from tulip import *
import tulipplugins
import tulippaths as tp
from FileOutputPlugin import FileOutputPlugin
class BidirectionalSynapseCleanup(FileOutputPlugin):
def __init__(self, context):
FileOutputPlugin.__init__(self, context)
self._edgeTypeLabel = "Edge type"
self.addStringParameter(self._edgeTypeLabel, "Find edges of this type that are missing a directed link.",
"Gap Junction")
def check(self):
return (True, "")
def findInverseEdge(self, graph, source, target, links):
linkedStructures = graph.getStringProperty("LinkedStructures")
for edge in graph.getInOutEdges(target):
if tp.utils.getEdgeType(edge, graph) == self.dataSet[self._edgeTypeLabel]:
edgeSource = graph.source(edge)
edgeTarget = graph.target(edge)
edgeLinks = linkedStructures[edge]
if edgeSource == target and edgeTarget == source and links == edgeLinks:
return True
return False
def run(self):
graph = self.graph
viewSelection = self.graph.getBooleanProperty("viewSelection")
linkedStructures = graph.getStringProperty("LinkedStructures")
self.beginFileOutput()
# Loop over all edges. If edge is a gap junction, then search for a gap junction going the other direction.
for edge in graph.getEdges():
edgeType = tp.utils.getEdgeType(edge, graph)
if edgeType == self.dataSet[self._edgeTypeLabel]:
source = graph.source(edge)
target = graph.target(edge)
links = linkedStructures[edge]
if not self.findInverseEdge(graph, source, target, links):
output = str(tp.utils.getNodeId(source, graph)) + "-" + tp.utils.getNodeType(source,
graph) + "; " + str(
tp.utils.getNodeId(target, graph)) + "-" + tp.utils.getNodeType(target,
graph) + "; " + links
viewSelection[edge] = True
self.printToFile(output)
self.endFileOutput()
return True
# The line below registers the plugin into the plugin database and updates the GUI to make it accessible through the menus.
tulipplugins.registerPlugin("BidirectionalSynapseCleanup", "Find Missing Bidirectional Synapses", "Kerzner",
"12/01/2017", "", "1.0")
|
<commit_before><commit_msg>Add plugin for finding bidirectional synapses missing edges.<commit_after>from tulip import *
import tulipplugins
import tulippaths as tp
from FileOutputPlugin import FileOutputPlugin
class BidirectionalSynapseCleanup(FileOutputPlugin):
def __init__(self, context):
FileOutputPlugin.__init__(self, context)
self._edgeTypeLabel = "Edge type"
self.addStringParameter(self._edgeTypeLabel, "Find edges of this type that are missing a directed link.",
"Gap Junction")
def check(self):
return (True, "")
def findInverseEdge(self, graph, source, target, links):
linkedStructures = graph.getStringProperty("LinkedStructures")
for edge in graph.getInOutEdges(target):
if tp.utils.getEdgeType(edge, graph) == self.dataSet[self._edgeTypeLabel]:
edgeSource = graph.source(edge)
edgeTarget = graph.target(edge)
edgeLinks = linkedStructures[edge]
if edgeSource == target and edgeTarget == source and links == edgeLinks:
return True
return False
def run(self):
graph = self.graph
viewSelection = self.graph.getBooleanProperty("viewSelection")
linkedStructures = graph.getStringProperty("LinkedStructures")
self.beginFileOutput()
# Loop over all edges. If edge is a gap junction, then search for a gap junction going the other direction.
for edge in graph.getEdges():
edgeType = tp.utils.getEdgeType(edge, graph)
if edgeType == self.dataSet[self._edgeTypeLabel]:
source = graph.source(edge)
target = graph.target(edge)
links = linkedStructures[edge]
if not self.findInverseEdge(graph, source, target, links):
output = str(tp.utils.getNodeId(source, graph)) + "-" + tp.utils.getNodeType(source,
graph) + "; " + str(
tp.utils.getNodeId(target, graph)) + "-" + tp.utils.getNodeType(target,
graph) + "; " + links
viewSelection[edge] = True
self.printToFile(output)
self.endFileOutput()
return True
# The line below registers the plugin into the plugin database and updates the GUI to make it accessible through the menus.
tulipplugins.registerPlugin("BidirectionalSynapseCleanup", "Find Missing Bidirectional Synapses", "Kerzner",
"12/01/2017", "", "1.0")
|
|
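A performance note on the plugin above: findInverseEdge rescans every in/out edge of the target node for each candidate edge, which degrades badly on dense graphs. A sketch of a one-pass variant that first indexes (source, target, links) triples in a set — same semantics, assuming Tulip node objects hash consistently (they wrap stable integer ids):

def find_unmatched_edges(graph, edge_type, linked_structures, get_edge_type):
    # Index every edge of the requested type by (source, target, links).
    seen = set()
    candidates = []
    for edge in graph.getEdges():
        if get_edge_type(edge, graph) != edge_type:
            continue
        key = (graph.source(edge), graph.target(edge), linked_structures[edge])
        seen.add(key)
        candidates.append((edge, key))
    # An edge is unmatched when its reversed key was never seen.
    return [edge for edge, (s, t, links) in candidates
            if (t, s, links) not in seen]

# Usage, in terms of the plugin's own names:
#   find_unmatched_edges(graph, self.dataSet[self._edgeTypeLabel],
#                        linkedStructures, tp.utils.getEdgeType)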
aa2983686245f79e39aff81f88834375ca2a1b7b
|
client/test_feedback_proxy.py
|
client/test_feedback_proxy.py
|
#!/usr/bin/env python
#import httplib
import json
import os
import sys
import time
import urllib2
import ipaddr
nb_ip = 250
#nb_ip = 20
nb_test = 500
server = "iconnect2.iro.umontreal.ca"
port = 80
print sys.argv
if len(sys.argv) > 1:
server = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
print server, port
host = "%s:%s" % (server, port)
measurements = ""
for i in range(nb_ip):
measurements += '-132.204.26.%s,29000' % str(i % 255)
#con = httplib.HTTPConnection(host)
#con.connect()
#url = "feedback.py?jsoncallback=lll&measurements=" + measurements
#print url
t0 = time.time()
for i in range(nb_test):
# os.system("wget -O feedback.output %s/%s" % (host, url))
# con.request("GET", url)
# rep = con.getresponse()
out = urllib2.urlopen('http://%s/feedback.py?jsoncallback=lll&measurements=%s' % (host, measurements))
assert out.getcode() == 200
t1 = time.time()
print "time per request with %d ip %fs" % (nb_ip, (t1 - t0) / nb_test)
print "request/seconds", 1 / ((t1 - t0) / nb_test)
#print out.read()
|
Add a script to speed test the feedback.
|
Add a script to speed test the feedback.
|
Python
|
bsd-3-clause
|
lisa-lab/pings,lisa-lab/pings,lisa-lab/pings,lisa-lab/pings
|
Add a script to speed test the feedback.
|
#!/usr/bin/env python
#import httplib
import json
import os
import sys
import time
import urllib2
import ipaddr
nb_ip = 250
#nb_ip = 20
nb_test = 500
server = "iconnect2.iro.umontreal.ca"
port = 80
print sys.argv
if len(sys.argv) > 1:
server = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
print server, port
host = "%s:%s" % (server, port)
measurements = ""
for i in range(nb_ip):
measurements += '-132.204.26.%s,29000' % str(i % 255)
#con = httplib.HTTPConnection(host)
#con.connect()
#url = "feedback.py?jsoncallback=lll&measurements=" + measurements
#print url
t0 = time.time()
for i in range(nb_test):
# os.system("wget -O feedback.output %s/%s" % (host, url))
# con.request("GET", url)
# rep = con.getresponse()
out = urllib2.urlopen('http://%s/feedback.py?jsoncallback=lll&measurements=%s' % (host, measurements))
assert out.getcode() == 200
t1 = time.time()
print "time per request with %d ip %fs" % (nb_ip, (t1 - t0) / nb_test)
print "request/seconds", 1 / ((t1 - t0) / nb_test)
#print out.read()
|
<commit_before><commit_msg>Add a script to speed test the feedback.<commit_after>
|
#!/usr/bin/env python
#import httplib
import json
import os
import sys
import time
import urllib2
import ipaddr
nb_ip = 250
#nb_ip = 20
nb_test = 500
server = "iconnect2.iro.umontreal.ca"
port = 80
print sys.argv
if len(sys.argv) > 1:
server = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
print server, port
host = "%s:%s" % (server, port)
measurements = ""
for i in range(nb_ip):
measurements += '-132.204.26.%s,29000' % str(i % 255)
#con = httplib.HTTPConnection(host)
#con.connect()
#url = "feedback.py?jsoncallback=lll&measurements=" + measurements
#print url
t0 = time.time()
for i in range(nb_test):
# os.system("wget -O feedback.output %s/%s" % (host, url))
# con.request("GET", url)
# rep = con.getresponse()
out = urllib2.urlopen('http://%s/feedback.py?jsoncallback=lll&measurements=%s' % (host, measurements))
assert out.getcode() == 200
t1 = time.time()
print "time per request with %d ip %fs" % (nb_ip, (t1 - t0) / nb_test)
print "request/seconds", 1 / ((t1 - t0) / nb_test)
#print out.read()
|
Add a script to speed test the feedback.#!/usr/bin/env python
#import httplib
import json
import os
import sys
import time
import urllib2
import ipaddr
nb_ip = 250
#nb_ip = 20
nb_test = 500
server = "iconnect2.iro.umontreal.ca"
port = 80
print sys.argv
if len(sys.argv) > 1:
server = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
print server, port
host = "%s:%s" % (server, port)
measurements = ""
for i in range(nb_ip):
measurements += '-132.204.26.%s,29000' % str(i % 255)
#con = httplib.HTTPConnection(host)
#con.connect()
#url = "feedback.py?jsoncallback=lll&measurements=" + measurements
#print url
t0 = time.time()
for i in range(nb_test):
# os.system("wget -O feedback.output %s/%s" % (host, url))
# con.request("GET", url)
# rep = con.getresponse()
out = urllib2.urlopen('http://%s/feedback.py?jsoncallback=lll&measurements=%s' % (host, measurements))
assert out.getcode() == 200
t1 = time.time()
print "time per request with %d ip %fs" % (nb_ip, (t1 - t0) / nb_test)
print "request/seconds", 1 / ((t1 - t0) / nb_test)
#print out.read()
|
<commit_before><commit_msg>Add a script to speed test the feedback.<commit_after>#!/usr/bin/env python
#import httplib
import json
import os
import sys
import time
import urllib2
import ipaddr
nb_ip = 250
#nb_ip = 20
nb_test = 500
server = "iconnect2.iro.umontreal.ca"
port = 80
print sys.argv
if len(sys.argv) > 1:
server = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
print server, port
host = "%s:%s" % (server, port)
measurements = ""
for i in range(nb_ip):
measurements += '-132.204.26.%s,29000' % str(i % 255)
#con = httplib.HTTPConnection(host)
#con.connect()
#url = "feedback.py?jsoncallback=lll&measurements=" + measurements
#print url
t0 = time.time()
for i in range(nb_test):
# os.system("wget -O feedback.output %s/%s" % (host, url))
# con.request("GET", url)
# rep = con.getresponse()
out = urllib2.urlopen('http://%s/feedback.py?jsoncallback=lll&measurements=%s' % (host, measurements))
assert out.getcode() == 200
t1 = time.time()
print "time per request with %d ip %fs" % (nb_ip, (t1 - t0) / nb_test)
print "request/seconds", 1 / ((t1 - t0) / nb_test)
#print out.read()
|
|
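Because each urllib2.urlopen call in the loop above opens a fresh TCP connection, the reported time per request includes connection setup. A sketch that reuses a single connection, so the loop measures request handling alone — Python 2, matching the script, and reviving the httplib draft left in the comments:

import httplib
import time

def time_requests(server, port, path, n=500):
    con = httplib.HTTPConnection(server, port)
    t0 = time.time()
    for _ in range(n):
        con.request("GET", path)
        rep = con.getresponse()
        rep.read()  # drain the body so the connection can be reused
        assert rep.status == 200
    return (time.time() - t0) / n

# e.g. time_requests(server, port,
#                    '/feedback.py?jsoncallback=lll&measurements=' + measurements)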
c389c560cda1d4a3bc9a13dafb006e89d4cafa8f
|
timelaps.py
|
timelaps.py
|
import time
import picamera
VIDEO_DAYS = 1
FRAMES_PER_HOUR = 60
FRAMES = FRAMES_PER_HOUR * 24 * VIDEO_DAYS
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(2)
cam.capture('/home/pi/Desktop/frame%03d.jpg' % frame)
# Capture the images
for frame in range(FRAMES):
# Note the time before the capture
start = time.time()
capture_frame(frame)
# Wait for the next capture. Note that we take into
# account the length of time it took to capture the
# image when calculating the delay
time.sleep(
int(60 * 60 / FRAMES_PER_HOUR) - (time.time() - start)
)
|
Add Time Lapse python file
|
Add Time Lapse python file
Add Time Lapse python file, exciting. Reference:
http://www.makeuseof.com/tag/raspberry-pi-camera-module/
|
Python
|
mit
|
RoelofZA/WeatherStation
|
Add Time Lapse python file
Add Time Lapse python file, exciting. Reference:
http://www.makeuseof.com/tag/raspberry-pi-camera-module/
|
import time
import picamera
VIDEO_DAYS = 1
FRAMES_PER_HOUR = 60
FRAMES = FRAMES_PER_HOUR * 24 * VIDEO_DAYS
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(2)
cam.capture('/home/pi/Desktop/frame%03d.jpg' % frame)
# Capture the images
for frame in range(FRAMES):
# Note the time before the capture
start = time.time()
capture_frame(frame)
# Wait for the next capture. Note that we take into
# account the length of time it took to capture the
# image when calculating the delay
time.sleep(
int(60 * 60 / FRAMES_PER_HOUR) - (time.time() - start)
)
|
<commit_before><commit_msg>Add Time Lapse python file
Add Time Lapse python file, exciting. Reference:
http://www.makeuseof.com/tag/raspberry-pi-camera-module/<commit_after>
|
import time
import picamera
VIDEO_DAYS = 1
FRAMES_PER_HOUR = 60
FRAMES = FRAMES_PER_HOUR * 24 * VIDEO_DAYS
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(2)
cam.capture('/home/pi/Desktop/frame%03d.jpg' % frame)
# Capture the images
for frame in range(FRAMES):
# Note the time before the capture
start = time.time()
capture_frame(frame)
# Wait for the next capture. Note that we take into
# account the length of time it took to capture the
# image when calculating the delay
time.sleep(
int(60 * 60 / FRAMES_PER_HOUR) - (time.time() - start)
)
|
Add Time Lapse python file
Add Time Lapse python file, exciting. Reference:
http://www.makeuseof.com/tag/raspberry-pi-camera-module/import time
import picamera
VIDEO_DAYS = 1
FRAMES_PER_HOUR = 60
FRAMES = FRAMES_PER_HOUR * 24 * VIDEO_DAYS
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(2)
cam.capture('/home/pi/Desktop/frame%03d.jpg' % frame)
# Capture the images
for frame in range(FRAMES):
# Note the time before the capture
start = time.time()
capture_frame(frame)
# Wait for the next capture. Note that we take into
# account the length of time it took to capture the
# image when calculating the delay
time.sleep(
int(60 * 60 / FRAMES_PER_HOUR) - (time.time() - start)
)
|
<commit_before><commit_msg>Add Time Lapse python file
Add Time Lapse python file, exciting. Reference:
http://www.makeuseof.com/tag/raspberry-pi-camera-module/<commit_after>import time
import picamera
VIDEO_DAYS = 1
FRAMES_PER_HOUR = 60
FRAMES = FRAMES_PER_HOUR * 24 * VIDEO_DAYS
def capture_frame(frame):
with picamera.PiCamera() as cam:
time.sleep(2)
cam.capture('/home/pi/Desktop/frame%03d.jpg' % frame)
# Capture the images
for frame in range(FRAMES):
# Note the time before the capture
start = time.time()
capture_frame(frame)
# Wait for the next capture. Note that we take into
# account the length of time it took to capture the
# image when calculating the delay
time.sleep(
int(60 * 60 / FRAMES_PER_HOUR) - (time.time() - start)
)
|
|
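One edge case in the capture loop above: if a capture ever takes longer than the frame interval (camera warm-up alone costs two seconds), time.sleep receives a negative value and raises ValueError. A small guard, assuming it is acceptable to skip straight to the next frame when running behind:

import time

def frame_delay(start, frames_per_hour):
    # Clamp to zero so a slow capture skips ahead instead of crashing.
    return max(0.0, 3600.0 / frames_per_hour - (time.time() - start))

# In the loop: time.sleep(frame_delay(start, FRAMES_PER_HOUR))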
2dc2b301edd7fa6451399a726f8ba7328865a4c5
|
django/website/contacts/migrations/0006_auto_20160713_1115.py
|
django/website/contacts/migrations/0006_auto_20160713_1115.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0005_auto_20160621_1456'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='area_of_specialisation',
),
migrations.RemoveField(
model_name='user',
name='business_address',
),
migrations.RemoveField(
model_name='user',
name='business_tel',
),
migrations.RemoveField(
model_name='user',
name='contact_type',
),
migrations.RemoveField(
model_name='user',
name='country',
),
migrations.RemoveField(
model_name='user',
name='cv',
),
migrations.RemoveField(
model_name='user',
name='fax',
),
migrations.RemoveField(
model_name='user',
name='gender',
),
migrations.RemoveField(
model_name='user',
name='home_address',
),
migrations.RemoveField(
model_name='user',
name='home_tel',
),
migrations.RemoveField(
model_name='user',
name='job_title',
),
migrations.RemoveField(
model_name='user',
name='mobile',
),
migrations.RemoveField(
model_name='user',
name='msn_id',
),
migrations.RemoveField(
model_name='user',
name='nationality',
),
migrations.RemoveField(
model_name='user',
name='notes',
),
migrations.RemoveField(
model_name='user',
name='personal_email',
),
migrations.RemoveField(
model_name='user',
name='picture',
),
migrations.RemoveField(
model_name='user',
name='skype_id',
),
migrations.RemoveField(
model_name='user',
name='title',
),
migrations.RemoveField(
model_name='user',
name='yahoo_messenger',
),
]
|
Add migration to remove fields from contacts
|
Add migration to remove fields from contacts
|
Python
|
agpl-3.0
|
aptivate/alfie,aptivate/kashana,aptivate/alfie,aptivate/kashana,aptivate/kashana,aptivate/alfie,aptivate/alfie,daniell/kashana,daniell/kashana,aptivate/kashana,daniell/kashana,daniell/kashana
|
Add migration to remove fields from contacts
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0005_auto_20160621_1456'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='area_of_specialisation',
),
migrations.RemoveField(
model_name='user',
name='business_address',
),
migrations.RemoveField(
model_name='user',
name='business_tel',
),
migrations.RemoveField(
model_name='user',
name='contact_type',
),
migrations.RemoveField(
model_name='user',
name='country',
),
migrations.RemoveField(
model_name='user',
name='cv',
),
migrations.RemoveField(
model_name='user',
name='fax',
),
migrations.RemoveField(
model_name='user',
name='gender',
),
migrations.RemoveField(
model_name='user',
name='home_address',
),
migrations.RemoveField(
model_name='user',
name='home_tel',
),
migrations.RemoveField(
model_name='user',
name='job_title',
),
migrations.RemoveField(
model_name='user',
name='mobile',
),
migrations.RemoveField(
model_name='user',
name='msn_id',
),
migrations.RemoveField(
model_name='user',
name='nationality',
),
migrations.RemoveField(
model_name='user',
name='notes',
),
migrations.RemoveField(
model_name='user',
name='personal_email',
),
migrations.RemoveField(
model_name='user',
name='picture',
),
migrations.RemoveField(
model_name='user',
name='skype_id',
),
migrations.RemoveField(
model_name='user',
name='title',
),
migrations.RemoveField(
model_name='user',
name='yahoo_messenger',
),
]
|
<commit_before><commit_msg>Add migration to remove fields from contacts<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0005_auto_20160621_1456'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='area_of_specialisation',
),
migrations.RemoveField(
model_name='user',
name='business_address',
),
migrations.RemoveField(
model_name='user',
name='business_tel',
),
migrations.RemoveField(
model_name='user',
name='contact_type',
),
migrations.RemoveField(
model_name='user',
name='country',
),
migrations.RemoveField(
model_name='user',
name='cv',
),
migrations.RemoveField(
model_name='user',
name='fax',
),
migrations.RemoveField(
model_name='user',
name='gender',
),
migrations.RemoveField(
model_name='user',
name='home_address',
),
migrations.RemoveField(
model_name='user',
name='home_tel',
),
migrations.RemoveField(
model_name='user',
name='job_title',
),
migrations.RemoveField(
model_name='user',
name='mobile',
),
migrations.RemoveField(
model_name='user',
name='msn_id',
),
migrations.RemoveField(
model_name='user',
name='nationality',
),
migrations.RemoveField(
model_name='user',
name='notes',
),
migrations.RemoveField(
model_name='user',
name='personal_email',
),
migrations.RemoveField(
model_name='user',
name='picture',
),
migrations.RemoveField(
model_name='user',
name='skype_id',
),
migrations.RemoveField(
model_name='user',
name='title',
),
migrations.RemoveField(
model_name='user',
name='yahoo_messenger',
),
]
|
Add migration to remove fields from contacts# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0005_auto_20160621_1456'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='area_of_specialisation',
),
migrations.RemoveField(
model_name='user',
name='business_address',
),
migrations.RemoveField(
model_name='user',
name='business_tel',
),
migrations.RemoveField(
model_name='user',
name='contact_type',
),
migrations.RemoveField(
model_name='user',
name='country',
),
migrations.RemoveField(
model_name='user',
name='cv',
),
migrations.RemoveField(
model_name='user',
name='fax',
),
migrations.RemoveField(
model_name='user',
name='gender',
),
migrations.RemoveField(
model_name='user',
name='home_address',
),
migrations.RemoveField(
model_name='user',
name='home_tel',
),
migrations.RemoveField(
model_name='user',
name='job_title',
),
migrations.RemoveField(
model_name='user',
name='mobile',
),
migrations.RemoveField(
model_name='user',
name='msn_id',
),
migrations.RemoveField(
model_name='user',
name='nationality',
),
migrations.RemoveField(
model_name='user',
name='notes',
),
migrations.RemoveField(
model_name='user',
name='personal_email',
),
migrations.RemoveField(
model_name='user',
name='picture',
),
migrations.RemoveField(
model_name='user',
name='skype_id',
),
migrations.RemoveField(
model_name='user',
name='title',
),
migrations.RemoveField(
model_name='user',
name='yahoo_messenger',
),
]
|
<commit_before><commit_msg>Add migration to remove fields from contacts<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0005_auto_20160621_1456'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='area_of_specialisation',
),
migrations.RemoveField(
model_name='user',
name='business_address',
),
migrations.RemoveField(
model_name='user',
name='business_tel',
),
migrations.RemoveField(
model_name='user',
name='contact_type',
),
migrations.RemoveField(
model_name='user',
name='country',
),
migrations.RemoveField(
model_name='user',
name='cv',
),
migrations.RemoveField(
model_name='user',
name='fax',
),
migrations.RemoveField(
model_name='user',
name='gender',
),
migrations.RemoveField(
model_name='user',
name='home_address',
),
migrations.RemoveField(
model_name='user',
name='home_tel',
),
migrations.RemoveField(
model_name='user',
name='job_title',
),
migrations.RemoveField(
model_name='user',
name='mobile',
),
migrations.RemoveField(
model_name='user',
name='msn_id',
),
migrations.RemoveField(
model_name='user',
name='nationality',
),
migrations.RemoveField(
model_name='user',
name='notes',
),
migrations.RemoveField(
model_name='user',
name='personal_email',
),
migrations.RemoveField(
model_name='user',
name='picture',
),
migrations.RemoveField(
model_name='user',
name='skype_id',
),
migrations.RemoveField(
model_name='user',
name='title',
),
migrations.RemoveField(
model_name='user',
name='yahoo_messenger',
),
]
|
|
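The twenty RemoveField operations above are mechanical — Django's makemigrations emits the same file once the fields are deleted from the model — and the list itself can be collapsed with a comprehension. A sketch, with the field list abbreviated for illustration:

REMOVED_FIELDS = [
    'area_of_specialisation', 'business_address', 'business_tel',
    # ... the remaining field names exactly as listed above ...
    'yahoo_messenger',
]

operations = [
    migrations.RemoveField(model_name='user', name=field)
    for field in REMOVED_FIELDS
]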
0bef2f3c6307ccdc8222e68d224d7c9aac215237
|
code/run_predictions.py
|
code/run_predictions.py
|
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
trIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.trIndices'
teIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.teIndices'
ssIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.ss%d.ssIndices'
for fold_idx in xrange (args.num_folds) :
with open(trIndices_fname %(fold_idx), 'r') as trIndices_f :
line = trIndices_f.readline().split()
trIndices = [int (i) for i in line ]
with open(teIndices_fname %(fold_idx),'r') as teIndices_f :
line = teIndices_f.readline().split()
teIndices = [int (i) for i in line ]
sde.run_predictions(fold_idx, args, resu_dir, data_dir, trIndices, teIndices )
|
Create script running predictions for each rep and each fold
|
Create script running predictions for each rep and each fold
|
Python
|
mit
|
chagaz/sfan,chagaz/sfan,chagaz/sfan,chagaz/sfan,chagaz/sfan
|
Create script running predictions for each rep and each fold
|
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
trIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.trIndices'
teIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.teIndices'
ssIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.ss%d.ssIndices'
for fold_idx in xrange (args.num_folds) :
with open(trIndices_fname %(fold_idx), 'r') as trIndices_f :
line = trIndices_f.readline().split()
trIndices = [int (i) for i in line ]
with open(teIndices_fname %(fold_idx),'r') as teIndices_f :
line = teIndices_f.readline().split()
teIndices = [int (i) for i in line ]
sde.run_predictions(fold_idx, args, resu_dir, data_dir, trIndices, teIndices )
|
<commit_before><commit_msg>Create script running predictions for each rep and each fold<commit_after>
|
import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
trIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.trIndices'
teIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.teIndices'
ssIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.ss%d.ssIndices'
for fold_idx in xrange (args.num_folds) :
with open(trIndices_fname %(fold_idx), 'r') as trIndices_f :
line = trIndices_f.readline().split()
trIndices = [int (i) for i in line ]
with open(teIndices_fname %(fold_idx),'r') as teIndices_f :
line = teIndices_f.readline().split()
teIndices = [int (i) for i in line ]
sde.run_predictions(fold_idx, args, resu_dir, data_dir, trIndices, teIndices )
|
Create script running predictions for each rep and each foldimport synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
trIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.trIndices'
teIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.teIndices'
ssIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.ss%d.ssIndices'
for fold_idx in xrange (args.num_folds) :
with open(trIndices_fname %(fold_idx), 'r') as trIndices_f :
line = trIndices_f.readline().split()
trIndices = [int (i) for i in line ]
with open(teIndices_fname %(fold_idx),'r') as teIndices_f :
line = teIndices_f.readline().split()
teIndices = [int (i) for i in line ]
sde.run_predictions(fold_idx, args, resu_dir, data_dir, trIndices, teIndices )
|
<commit_before><commit_msg>Create script running predictions for each rep and each fold<commit_after>import synthetic_data_experiments as sde
import logging
if __name__ == "__main__":
args = sde.get_integrous_arguments_values()
for repeat_idx in xrange(args.num_repeats) :
resu_dir = "%s/repeat_%d" % (args.resu_dir, repeat_idx)
data_dir = '%s/repeat_%d' % (args.data_dir, repeat_idx)
trIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.trIndices'
teIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.teIndices'
ssIndices_fname = data_dir+'/'+args.simu_id+'.fold%d.ss%d.ssIndices'
for fold_idx in xrange (args.num_folds) :
with open(trIndices_fname %(fold_idx), 'r') as trIndices_f :
line = trIndices_f.readline().split()
trIndices = [int (i) for i in line ]
with open(teIndices_fname %(fold_idx),'r') as teIndices_f :
line = teIndices_f.readline().split()
teIndices = [int (i) for i in line ]
sde.run_predictions(fold_idx, args, resu_dir, data_dir, trIndices, teIndices )
|
|
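The two with-blocks in the fold loop above repeat the same one-line parse; a small helper keeps the loop flat with identical behaviour:

def read_indices(fname):
    # Each index file holds a single whitespace-separated line of integers.
    with open(fname) as f:
        return [int(i) for i in f.readline().split()]

# In the loop:
#   trIndices = read_indices(trIndices_fname % fold_idx)
#   teIndices = read_indices(teIndices_fname % fold_idx)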
56da9b10abcc94a341488e5032e5617d0def9d56
|
mongo_stats.py
|
mongo_stats.py
|
#!/usr/bin/env python
# Dump stats for raw and queue.
import os
import time
import pymongo
import json
conn = pymongo.Connection()
# db -> collections
dbs = {
'raw' : ['commits', 'repos', 'users'],
'queue' : ['commits', 'repos', 'users']
}
for db, collections in dbs.iteritems():
for collection in collections:
count = conn[db][collection].count()
print "%s.%s: %i" % (db, collection, count)
|
Add small script to print out db stats
|
Add small script to print out db stats
|
Python
|
mit
|
emarschner/gothub,emarschner/gothub,emarschner/gothub,emarschner/gothub
|
Add small script to print out db stats
|
#!/usr/bin/env python
# Dump stats for raw and queue.
import os
import time
import pymongo
import json
conn = pymongo.Connection()
# db -> collections
dbs = {
'raw' : ['commits', 'repos', 'users'],
'queue' : ['commits', 'repos', 'users']
}
for db, collections in dbs.iteritems():
for collection in collections:
count = conn[db][collection].count()
print "%s.%s: %i" % (db, collection, count)
|
<commit_before><commit_msg>Add small script to print out db stats<commit_after>
|
#!/usr/bin/env python
# Dump stats for raw and queue.
import os
import time
import pymongo
import json
conn = pymongo.Connection()
# db -> collections
dbs = {
'raw' : ['commits', 'repos', 'users'],
'queue' : ['commits', 'repos', 'users']
}
for db, collections in dbs.iteritems():
for collection in collections:
count = conn[db][collection].count()
print "%s.%s: %i" % (db, collection, count)
|
Add small script to print out db stats#!/usr/bin/env python
# Dump stats for raw and queue.
import os
import time
import pymongo
import json
conn = pymongo.Connection()
# db -> collections
dbs = {
'raw' : ['commits', 'repos', 'users'],
'queue' : ['commits', 'repos', 'users']
}
for db, collections in dbs.iteritems():
for collection in collections:
count = conn[db][collection].count()
print "%s.%s: %i" % (db, collection, count)
|
<commit_before><commit_msg>Add small script to print out db stats<commit_after>#!/usr/bin/env python
# Dump stats for raw and queue.
import os
import time
import pymongo
import json
conn = pymongo.Connection()
# db -> collections
dbs = {
'raw' : ['commits', 'repos', 'users'],
'queue' : ['commits', 'repos', 'users']
}
for db, collections in dbs.iteritems():
for collection in collections:
count = conn[db][collection].count()
print "%s.%s: %i" % (db, collection, count)
|
|
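pymongo.Connection was removed in PyMongo 3, so the script above only runs against old installs. A rough modern equivalent of the same dump, for reference (Python 3 with MongoClient; estimated_document_count needs PyMongo 3.7+):

from pymongo import MongoClient

client = MongoClient()
dbs = {
    'raw': ['commits', 'repos', 'users'],
    'queue': ['commits', 'repos', 'users'],
}
for db, collections in dbs.items():
    for collection in collections:
        count = client[db][collection].estimated_document_count()
        print("%s.%s: %i" % (db, collection, count))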
32912c8c5c02d1922f56bac4b8c97f4e53eccb81
|
muffin_peewee/debugtoolbar.py
|
muffin_peewee/debugtoolbar.py
|
import logging
import jinja2
import datetime as dt
from muffin_debugtoolbar.panels import DebugPanel
from muffin_debugtoolbar.utils import LoggingTrackingHandler
LOGGER = logging.getLogger('peewee')
class PeeweeDebugPanel(DebugPanel):
name = 'Peewee queries'
template = jinja2.Template("""
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Time</th>
<th>Query</th>
</tr>
</thead>
<tbody>
{% for record in records %}
<tr>
<td>{{ record['time'] }}</td>
<td>{{ record['message'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
""")
def __init__(self, app, request=None):
super(PeeweeDebugPanel, self).__init__(app, request)
LOGGER.setLevel(logging.DEBUG)
self.handler = LoggingTrackingHandler()
def wrap_handler(self, handler, context_switcher):
context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler))
context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler))
@property
def nav_title(self):
""" Get a navigation title. """
return "%s (%s)" % (self.title, len(self.handler.records))
@property
def has_content(self):
return self.handler.records
def render_vars(self):
return {
'records': [
{
'message': record.getMessage(),
'time': dt.datetime.fromtimestamp(record.created).strftime('%H:%M:%S'),
} for record in self.handler.records
]
}
|
Add debugpanel for Muffin Debugtoolbar
|
Add debugpanel for Muffin Debugtoolbar
|
Python
|
mit
|
klen/muffin-peewee
|
Add debugpanel for Muffin Debugtoolbar
|
import logging
import jinja2
import datetime as dt
from muffin_debugtoolbar.panels import DebugPanel
from muffin_debugtoolbar.utils import LoggingTrackingHandler
LOGGER = logging.getLogger('peewee')
class PeeweeDebugPanel(DebugPanel):
name = 'Peewee queries'
template = jinja2.Template("""
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Time</th>
<th>Query</th>
</tr>
</thead>
<tbody>
{% for record in records %}
<tr>
<td>{{ record['time'] }}</td>
<td>{{ record['message'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
""")
def __init__(self, app, request=None):
super(PeeweeDebugPanel, self).__init__(app, request)
LOGGER.setLevel(logging.DEBUG)
self.handler = LoggingTrackingHandler()
def wrap_handler(self, handler, context_switcher):
context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler))
context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler))
@property
def nav_title(self):
""" Get a navigation title. """
return "%s (%s)" % (self.title, len(self.handler.records))
@property
def has_content(self):
return self.handler.records
def render_vars(self):
return {
'records': [
{
'message': record.getMessage(),
'time': dt.datetime.fromtimestamp(record.created).strftime('%H:%M:%S'),
} for record in self.handler.records
]
}
|
<commit_before><commit_msg>Add debugpanel for Muffin Debugtoolbar<commit_after>
|
import logging
import jinja2
import datetime as dt
from muffin_debugtoolbar.panels import DebugPanel
from muffin_debugtoolbar.utils import LoggingTrackingHandler
LOGGER = logging.getLogger('peewee')
class PeeweeDebugPanel(DebugPanel):
name = 'Peewee queries'
template = jinja2.Template("""
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Time</th>
<th>Query</th>
</tr>
</thead>
<tbody>
{% for record in records %}
<tr>
<td>{{ record['time'] }}</td>
<td>{{ record['message'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
""")
def __init__(self, app, request=None):
super(PeeweeDebugPanel, self).__init__(app, request)
LOGGER.setLevel(logging.DEBUG)
self.handler = LoggingTrackingHandler()
def wrap_handler(self, handler, context_switcher):
context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler))
context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler))
@property
def nav_title(self):
""" Get a navigation title. """
return "%s (%s)" % (self.title, len(self.handler.records))
@property
def has_content(self):
return self.handler.records
def render_vars(self):
return {
'records': [
{
'message': record.getMessage(),
'time': dt.datetime.fromtimestamp(record.created).strftime('%H:%M:%S'),
} for record in self.handler.records
]
}
|
Add debugpanel for Muffin Debugtoolbarimport logging
import jinja2
import datetime as dt
from muffin_debugtoolbar.panels import DebugPanel
from muffin_debugtoolbar.utils import LoggingTrackingHandler
LOGGER = logging.getLogger('peewee')
class PeeweeDebugPanel(DebugPanel):
name = 'Peewee queries'
template = jinja2.Template("""
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Time</th>
<th>Query</th>
</tr>
</thead>
<tbody>
{% for record in records %}
<tr>
<td>{{ record['time'] }}</td>
<td>{{ record['message'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
""")
def __init__(self, app, request=None):
super(PeeweeDebugPanel, self).__init__(app, request)
LOGGER.setLevel(logging.DEBUG)
self.handler = LoggingTrackingHandler()
def wrap_handler(self, handler, context_switcher):
context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler))
context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler))
@property
def nav_title(self):
""" Get a navigation title. """
return "%s (%s)" % (self.title, len(self.handler.records))
@property
def has_content(self):
return self.handler.records
def render_vars(self):
return {
'records': [
{
'message': record.getMessage(),
'time': dt.datetime.fromtimestamp(record.created).strftime('%H:%M:%S'),
} for record in self.handler.records
]
}
|
<commit_before><commit_msg>Add debugpanel for Muffin Debugtoolbar<commit_after>import logging
import jinja2
import datetime as dt
from muffin_debugtoolbar.panels import DebugPanel
from muffin_debugtoolbar.utils import LoggingTrackingHandler
LOGGER = logging.getLogger('peewee')
class PeeweeDebugPanel(DebugPanel):
name = 'Peewee queries'
template = jinja2.Template("""
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Time</th>
<th>Query</th>
</tr>
</thead>
<tbody>
{% for record in records %}
<tr>
<td>{{ record['time'] }}</td>
<td>{{ record['message'] }}</td>
</tr>
{% endfor %}
</tbody>
</table>
""")
def __init__(self, app, request=None):
super(PeeweeDebugPanel, self).__init__(app, request)
LOGGER.setLevel(logging.DEBUG)
self.handler = LoggingTrackingHandler()
def wrap_handler(self, handler, context_switcher):
context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler))
context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler))
@property
def nav_title(self):
""" Get a navigation title. """
return "%s (%s)" % (self.title, len(self.handler.records))
@property
def has_content(self):
return self.handler.records
def render_vars(self):
return {
'records': [
{
'message': record.getMessage(),
'time': dt.datetime.fromtimestamp(record.created).strftime('%H:%M:%S'),
} for record in self.handler.records
]
}
|
|
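The panel above leans on a LoggingTrackingHandler to capture peewee's query log only while it is attached to the logger. A minimal self-contained sketch of that idea (assuming the handler simply stores every record it sees; this is not necessarily muffin_debugtoolbar's exact implementation):

import logging

class TrackingHandler(logging.Handler):
    """Hypothetical stand-in for LoggingTrackingHandler: keeps every record."""
    def __init__(self):
        logging.Handler.__init__(self)
        self.records = []

    def emit(self, record):
        self.records.append(record)

logger = logging.getLogger('peewee')
logger.setLevel(logging.DEBUG)
handler = TrackingHandler()
logger.addHandler(handler)        # context in
logger.debug('SELECT 1')          # a query logged by peewee would land here
logger.removeHandler(handler)     # context out
print(len(handler.records))       # 1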
9232d69874ae5d1b2b7ae86476aa690a1bb89029
|
distarray/core/tests/test_distributed_array_protocol.py
|
distarray/core/tests/test_distributed_array_protocol.py
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size >= 4.')
else:
self.arr = da.DistArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def testFoo(self):
self.assertIsInstance(self.arr, da.DistArray)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Add stub test file for Distributed Array Protocol.
|
Add stub test file for Distributed Array Protocol.
|
Python
|
bsd-3-clause
|
enthought/distarray,RaoUmer/distarray,enthought/distarray,RaoUmer/distarray
|
Add stub test file for Distributed Array Protocol.
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size >= 4.')
else:
self.arr = da.DistArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def testFoo(self):
self.assertIsInstance(self.arr, da.DistArray)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
<commit_before><commit_msg>Add stub test file for Distributed Array Protocol.<commit_after>
|
import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size >= 4.')
else:
self.arr = da.DistArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def testFoo(self):
self.assertIsInstance(self.arr, da.DistArray)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Add stub test file for Distributed Array Protocol.import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size >= 4.')
else:
self.arr = da.DistArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def testFoo(self):
self.assertIsInstance(self.arr, da.DistArray)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
<commit_before><commit_msg>Add stub test file for Distributed Array Protocol.<commit_after>import unittest
import distarray as da
from distarray.mpi.mpibase import create_comm_of_size, InvalidCommSizeError
class TestDistributedArrayProtocol(unittest.TestCase):
def setUp(self):
try:
comm = create_comm_of_size(4)
except InvalidCommSizeError:
raise unittest.SkipTest('Must run with comm size >= 4.')
else:
self.arr = da.DistArray((16,16),
grid_shape=(4,),
comm=comm, buf=None, offset=0)
def testFoo(self):
self.assertIsInstance(self.arr, da.DistArray)
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
|
577d13a232b426960dae0b28c63ecac7c33b2643
|
nose2/tests/functional/test_main.py
|
nose2/tests/functional/test_main.py
|
from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
|
from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
def test_extra_hooks(self):
class Check(object):
ran = False
def startTestRun(self, event):
self.ran = True
check = Check()
proc = self.runIn('scenario/no_tests', hooks=[('startTestRun', check)])
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
assert check.ran, "Extra hook did not execute"
|
Add test for hooks kwarg to PluggableTestProgram
|
Add test for hooks kwarg to PluggableTestProgram
|
Python
|
bsd-2-clause
|
ojengwa/nose2,ezigman/nose2,leth/nose2,little-dude/nose2,ptthiem/nose2,leth/nose2,ezigman/nose2,ojengwa/nose2,ptthiem/nose2,little-dude/nose2
|
from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
Add test for hooks kwarg to PluggableTestProgram
|
from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
def test_extra_hooks(self):
class Check(object):
ran = False
def startTestRun(self, event):
self.ran = True
check = Check()
proc = self.runIn('scenario/no_tests', hooks=[('startTestRun', check)])
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
assert check.ran, "Extra hook did not execute"
|
<commit_before>from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
<commit_msg>Add test for hooks kwarg to PluggableTestProgram<commit_after>
|
from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
def test_extra_hooks(self):
class Check(object):
ran = False
def startTestRun(self, event):
self.ran = True
check = Check()
proc = self.runIn('scenario/no_tests', hooks=[('startTestRun', check)])
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
assert check.ran, "Extra hook did not execute"
|
from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
Add test for hooks kwarg to PluggableTestProgramfrom nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
def test_extra_hooks(self):
class Check(object):
ran = False
def startTestRun(self, event):
self.ran = True
check = Check()
proc = self.runIn('scenario/no_tests', hooks=[('startTestRun', check)])
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
assert check.ran, "Extra hook did not execute"
|
<commit_before>from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
<commit_msg>Add test for hooks kwarg to PluggableTestProgram<commit_after>from nose2.tests._common import FunctionalTestCase
class TestPluggableTestProgram(FunctionalTestCase):
def test_run_in_empty_dir_succeeds(self):
proc = self.runIn('scenario/no_tests')
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
def test_extra_hooks(self):
class Check(object):
ran = False
def startTestRun(self, event):
self.ran = True
check = Check()
proc = self.runIn('scenario/no_tests', hooks=[('startTestRun', check)])
stdout, stderr = proc.communicate()
self.assertEqual(proc.poll(), 0, stderr)
assert check.ran, "Extra hook did not execute"
|
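The hooks test above uses a sentinel object whose method flips a flag when the framework dispatches the startTestRun event. Stripped of nose2 entirely, the pattern is just attribute lookup plus a call (fire_hook below is an invented dispatcher for illustration, not nose2 API):

class Check(object):
    ran = False
    def startTestRun(self, event):
        self.ran = True

def fire_hook(listeners, method_name, event=None):
    # invented dispatcher for illustration; nose2's real plumbing differs
    for listener in listeners:
        getattr(listener, method_name)(event)

check = Check()
fire_hook([check], 'startTestRun')
assert check.ran, "Extra hook did not execute"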
d3c95eb39a7fbaa2028b00176e71b192bf6d6e1b
|
app/soc/modules/gci/models/static_content.py
|
app/soc/modules/gci/models/static_content.py
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the GCIStaticContent model."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from django.utils import translation
class GCIStaticContent(db.Model):
"""GCI static content name and its blobstore key.
Parent:
soc.modules.gci.models.program.GCIProgram
"""
#: Identifier of the content which is the last part of its unique key name
content_id = db.StringProperty(required=True,
verbose_name=translation.ugettext('Content ID'))
content_id.help_text = translation.ugettext(
'Used as part of URL link to access this content.')
#: Property pointing to the work uploaded as a file or archive
content = blobstore.BlobReferenceProperty(
required=True, verbose_name=translation.ugettext('Content'))
content.help_text = translation.ugettext(
'Static content as a single file or as archive (max file size: 32 MB)')
#: Property containing the date when the content was first uploaded
created_on = db.DateTimeProperty(
required=True, auto_now_add=True,
verbose_name=translation.ugettext('Created on'))
#: Property containing the date when the content was updated
updated_on = db.DateTimeProperty(
required=True, auto_now=True,
verbose_name=translation.ugettext('Updated on'))
|
Implement a model to store references to program's static content.
|
Implement a model to store references to program's static content.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
Implement a model to store references to program's static content.
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the GCIStaticContent model."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from django.utils import translation
class GCIStaticContent(db.Model):
"""GCI static content name and its blobstore key.
Parent:
soc.modules.gci.models.program.GCIProgram
"""
#: Identifier of the content which is the last part of its unique key name
content_id = db.StringProperty(required=True,
verbose_name=translation.ugettext('Content ID'))
content_id.help_text = translation.ugettext(
'Used as part of URL link to access this content.')
#: Property pointing to the work uploaded as a file or archive
content = blobstore.BlobReferenceProperty(
required=True, verbose_name=translation.ugettext('Content'))
content.help_text = translation.ugettext(
'Static content as a single file or as archive (max file size: 32 MB)')
#: Property containing the date when the content was first uploaded
created_on = db.DateTimeProperty(
required=True, auto_now_add=True,
verbose_name=translation.ugettext('Created on'))
#: Property containing the date when the content was updated
updated_on = db.DateTimeProperty(
required=True, auto_now=True,
verbose_name=translation.ugettext('Updated on'))
|
<commit_before><commit_msg>Implement a model to store references to program's static content.<commit_after>
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the GCIStaticContent model."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from django.utils import translation
class GCIStaticContent(db.Model):
"""GCI static content name and its blobstore key.
Parent:
soc.modules.gci.models.program.GCIProgram
"""
#: Identifier of the content which is the last part of its unique key name
content_id = db.StringProperty(required=True,
verbose_name=translation.ugettext('Content ID'))
content_id.help_text = translation.ugettext(
'Used as part of URL link to access this content.')
#: Property pointing to the work uploaded as a file or archive
content = blobstore.BlobReferenceProperty(
required=True, verbose_name=translation.ugettext('Content'))
content.help_text = translation.ugettext(
'Static content as a single file or as archive (max file size: 32 MB)')
#: Property containing the date when the content was first uploaded
created_on = db.DateTimeProperty(
required=True, auto_now_add=True,
verbose_name=translation.ugettext('Created on'))
#: Property containing the date when the content was updated
updated_on = db.DateTimeProperty(
required=True, auto_now=True,
verbose_name=translation.ugettext('Updated on'))
|
Implement a model to store references to program's static content.# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the GCIStaticContent model."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from django.utils import translation
class GCIStaticContent(db.Model):
"""GCI static content name and its blobstore key.
Parent:
soc.modules.gci.models.program.GCIProgram
"""
#: Identifier of the content which is the last part of its unique key name
content_id = db.StringProperty(required=True,
verbose_name=translation.ugettext('Content ID'))
content_id.help_text = translation.ugettext(
'Used as part of URL link to access this content.')
#: Property pointing to the work uploaded as a file or archive
content = blobstore.BlobReferenceProperty(
required=True, verbose_name=translation.ugettext('Content'))
content.help_text = translation.ugettext(
'Static content as a single file or as archive (max file size: 32 MB)')
#: Property containing the date when the content was first uploaded
created_on = db.DateTimeProperty(
required=True, auto_now_add=True,
verbose_name=translation.ugettext('Created on'))
#: Property containing the date when the content was updated
updated_on = db.DateTimeProperty(
required=True, auto_now=True,
verbose_name=translation.ugettext('Updated on'))
|
<commit_before><commit_msg>Implement a model to store references to program's static content.<commit_after># Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing the GCIStaticContent model."""
from google.appengine.ext import blobstore
from google.appengine.ext import db
from django.utils import translation
class GCIStaticContent(db.Model):
"""GCI static content name and its blobstore key.
Parent:
soc.modules.gci.models.program.GCIProgram
"""
#: Identifier of the content which is the last part of its unique key name
content_id = db.StringProperty(required=True,
verbose_name=translation.ugettext('Content ID'))
content_id.help_text = translation.ugettext(
'Used as part of URL link to access this content.')
#: Property pointing to the work uploaded as a file or archive
content = blobstore.BlobReferenceProperty(
required=True, verbose_name=translation.ugettext('Content'))
content.help_text = translation.ugettext(
'Static content as a single file or as archive (max file size: 32 MB)')
#: Property containing the date when the content was first uploaded
created_on = db.DateTimeProperty(
required=True, auto_now_add=True,
verbose_name=translation.ugettext('Created on'))
#: Property containing the date when the content was updated
updated_on = db.DateTimeProperty(
required=True, auto_now=True,
verbose_name=translation.ugettext('Updated on'))
|
|
9cb23fe24ddf244b3e2569ce1760202995ab035d
|
tohu/v3/derived_generators.py
|
tohu/v3/derived_generators.py
|
from .base import TohuBaseGenerator
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
|
Add derived generator Apply which allows applying a function to a set of input generators
|
Add derived generator Apply which allows applying a function to a set of input generators
|
Python
|
mit
|
maxalbert/tohu
|
Add derived generator Apply which allows applying a function to a set of input generators
|
from .base import TohuBaseGenerator
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
|
<commit_before><commit_msg>Add derived generator Apply which allows applying a function to a set of input generators<commit_after>
|
from .base import TohuBaseGenerator
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
|
Add derived generator Apply which allows applying a function to a set of input generatorsfrom .base import TohuBaseGenerator
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
|
<commit_before><commit_msg>Add derived generator Apply which allows applying a function to a set of input generators<commit_after>from .base import TohuBaseGenerator
class Apply(TohuBaseGenerator):
def __init__(self, func, *arg_gens, **kwarg_gens):
self.func = func
self.orig_arg_gens = arg_gens
self.orig_kwarg_gens = kwarg_gens
self.arg_gens = [g.clone() for g in arg_gens]
self.kwarg_gens = {name: g.clone() for name, g in kwarg_gens.items()}
def __next__(self):
next_args = (next(g) for g in self.arg_gens)
next_kwargs = {name: next(g) for name, g in self.kwarg_gens.items()}
return self.func(*next_args, **next_kwargs)
|
|
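The Apply generator above pulls one value from each (cloned) input generator and feeds the values to func. The same dataflow can be illustrated without any tohu machinery using plain iterators (SimpleApply below is illustrative only; it skips the clone step that tohu performs):

import itertools
import operator

class SimpleApply(object):
    """Illustrative only: apply func to one value drawn from each input iterator."""
    def __init__(self, func, *arg_iters):
        self.func = func
        self.arg_iters = [iter(it) for it in arg_iters]

    def __iter__(self):
        return self

    def __next__(self):
        return self.func(*(next(it) for it in self.arg_iters))

g = SimpleApply(operator.add, itertools.count(0), itertools.count(100))
print([next(g) for _ in range(3)])   # [100, 102, 104]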
60c70bf439d35238c0331b665ece60cf9718cf0f
|
src/swap-cname.py
|
src/swap-cname.py
|
#!/usr/bin/env python
import os
import sys
import httplib
import json
if len(sys.argv) != 3:
print """ERROR: wrong number of arguments.
Usage: tsuru swap-cname app1-name app2-name
Swap cname between two apps."""
sys.exit(1)
def get_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("GET", "/apps/" + app, "", headers)
response = conn.getresponse()
data = json.loads(response.read())
if len(data.get("cname")) == 0:
return None
return data.get("cname")
def remove_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("DELETE", "/apps/" + app + '/cname', '', headers)
response = conn.getresponse()
def set_cname(app, cname):
headers = {"Content-Type" : "application/json", "Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("POST", "/apps/" + app + '/cname', '{"cname": "' + cname + '"}', headers)
response = conn.getresponse()
token = os.environ['TSURU_TOKEN']
target = os.environ['TSURU_TARGET']
apps = [sys.argv[1], sys.argv[2]]
cnames = [get_cname(apps[1]), get_cname(apps[0])]
for i,app in enumerate(apps):
if cnames[i] is not None:
set_cname(app, cnames[i])
print 'app ' + app + ' is live at ' + cnames[i]
else:
remove_cname(app)
|
Allow cname swap between two applications.
|
Allow cname swap between two applications.
|
Python
|
mit
|
emerleite/tsuru-swap-cname
|
Allow cname swap between two applications.
|
#!/usr/bin/env python
import os
import sys
import httplib
import json
if len(sys.argv) != 3:
print """ERROR: wrong number of arguments.
Usage: tsuru swap-cname app1-name app2-name
Swap cname between two apps."""
sys.exit(1)
def get_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("GET", "/apps/" + app, "", headers)
response = conn.getresponse()
data = json.loads(response.read())
if len(data.get("cname")) == 0:
return None
return data.get("cname")
def remove_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("DELETE", "/apps/" + app + '/cname', '', headers)
response = conn.getresponse()
def set_cname(app, cname):
headers = {"Content-Type" : "application/json", "Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("POST", "/apps/" + app + '/cname', '{"cname": "' + cname + '"}', headers)
response = conn.getresponse()
token = os.environ['TSURU_TOKEN']
target = os.environ['TSURU_TARGET']
apps = [sys.argv[1], sys.argv[2]]
cnames = [get_cname(apps[1]), get_cname(apps[0])]
for i,app in enumerate(apps):
if cnames[i] is not None:
set_cname(app, cnames[i])
print 'app ' + app + ' is live at ' + cnames[i]
else:
remove_cname(app)
|
<commit_before><commit_msg>Allow cname swap between two applications.<commit_after>
|
#!/usr/bin/env python
import os
import sys
import httplib
import json
if len(sys.argv) != 3:
print """ERROR: wrong number of arguments.
Usage: tsuru swap-cname app1-name app2-name
Swap cname between two apps."""
sys.exit(1)
def get_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("GET", "/apps/" + app, "", headers)
response = conn.getresponse()
data = json.loads(response.read())
if len(data.get("cname")) == 0:
return None
return data.get("cname")
def remove_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("DELETE", "/apps/" + app + '/cname', '', headers)
response = conn.getresponse()
def set_cname(app, cname):
headers = {"Content-Type" : "application/json", "Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("POST", "/apps/" + app + '/cname', '{"cname": "' + cname + '"}', headers)
response = conn.getresponse()
token = os.environ['TSURU_TOKEN']
target = os.environ['TSURU_TARGET']
apps = [sys.argv[1], sys.argv[2]]
cnames = [get_cname(apps[1]), get_cname(apps[0])]
for i,app in enumerate(apps):
if cnames[i] is not None:
set_cname(app, cnames[i])
print 'app ' + app + ' is live at ' + cnames[i]
else:
remove_cname(app)
|
Allow cname swap between two applications.#!/usr/bin/env python
import os
import sys
import httplib
import json
if len(sys.argv) != 3:
print """ERROR: wrong number of arguments.
Usage: tsuru swap-cname app1-name app2-name
Swap cname between two apps."""
sys.exit(1)
def get_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("GET", "/apps/" + app, "", headers)
response = conn.getresponse()
data = json.loads(response.read())
if len(data.get("cname")) == 0:
return None
return data.get("cname")
def remove_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("DELETE", "/apps/" + app + '/cname', '', headers)
response = conn.getresponse()
def set_cname(app, cname):
headers = {"Content-Type" : "application/json", "Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("POST", "/apps/" + app + '/cname', '{"cname": "' + cname + '"}', headers)
response = conn.getresponse()
token = os.environ['TSURU_TOKEN']
target = os.environ['TSURU_TARGET']
apps = [sys.argv[1], sys.argv[2]]
cnames = [get_cname(apps[1]), get_cname(apps[0])]
for i,app in enumerate(apps):
if cnames[i] is not None:
set_cname(app, cnames[i])
print 'app ' + app + ' is live at ' + cnames[i]
else:
remove_cname(app)
|
<commit_before><commit_msg>Allow cname swap between two applications.<commit_after>#!/usr/bin/env python
import os
import sys
import httplib
import json
if len(sys.argv) != 3:
print """ERROR: wrong number of arguments.
Usage: tsuru swap-cname app1-name app2-name
Swap cname between two apps."""
sys.exit(1)
def get_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("GET", "/apps/" + app, "", headers)
response = conn.getresponse()
data = json.loads(response.read())
if len(data.get("cname")) == 0:
return None
return data.get("cname")
def remove_cname(app):
headers = {"Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("DELETE", "/apps/" + app + '/cname', '', headers)
response = conn.getresponse()
def set_cname(app, cname):
headers = {"Content-Type" : "application/json", "Authorization" : "bearer " + token}
conn = httplib.HTTPConnection(target)
conn.request("POST", "/apps/" + app + '/cname', '{"cname": "' + cname + '"}', headers)
response = conn.getresponse()
token = os.environ['TSURU_TOKEN']
target = os.environ['TSURU_TARGET']
apps = [sys.argv[1], sys.argv[2]]
cnames = [get_cname(apps[1]), get_cname(apps[0])]
for i,app in enumerate(apps):
if cnames[i] is not None:
set_cname(app, cnames[i])
print 'app ' + app + ' is live at ' + cnames[i]
else:
remove_cname(app)
|
|
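The essential trick in the script above is reading both cnames before writing either, so the values can cross over without clobbering each other. The control flow can be exercised offline with an in-memory stand-in for the tsuru API (purely illustrative):

store = {'app1': 'www.example.com', 'app2': None}   # app name -> cname

def get_cname(app):
    return store[app]

def set_cname(app, cname):
    store[app] = cname

def remove_cname(app):
    store[app] = None

apps = ['app1', 'app2']
cnames = [get_cname(apps[1]), get_cname(apps[0])]   # cross-read before any write
for i, app in enumerate(apps):
    if cnames[i] is not None:
        set_cname(app, cnames[i])
    else:
        remove_cname(app)
print(store)   # {'app1': None, 'app2': 'www.example.com'}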
fecd1e39592ea420039be37717f2b641249b5458
|
examples/plot_2d.py
|
examples/plot_2d.py
|
"""Visualizes batch ('population') optimizers as 2D plots."""
import numpy as np
from heuristic_optimization.optimizers import ParticleSwarmOptimizer
try:
import matplotlib.pyplot as plt
except ImportError:
print("This requires matplotlib (despite not being in the dependencies), but it was not found. Exiting.")
raise
def plot(points, bounds=None):
"""Plots points in 2D (scatter plot).
Any additional dimensions are ignored."""
plt.scatter(points[:, 0], points[:, 1])
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def plot_history(history, bounds=None):
"""Plots line along historic points."""
# shape of history: (n, m, o)
# n: number of particles
# m: dimensions of search space
# o: length of history
for i in range(len(history)):
plt.plot(history[i][0], history[i][1], 'o-')
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def weird_append(big_thing, slicey_thing):
# uh, yeah, this is just meant to append along the last axis
# not sure if there's a better way to do so. maybe relevant:
# https://stackoverflow.com/questions/8898471/concatenate-two-numpy-arrays-in-the-4th-dimension
return np.concatenate((big_thing, np.expand_dims(slicey_thing, -1)), -1)
if __name__ == '__main__':
BOUNDS = ([0, 0], [1, 1])
batch_optimizer = ParticleSwarmOptimizer(lambda A: ((A - 0.5) ** 2).sum(axis=1),
BOUNDS,
obj_fct_is_vectorized=True,
options={'num_particles': 15, 'max_iters': 10})
batch_optimizer.initialize()
history = np.expand_dims(np.copy(batch_optimizer.positions), -1)
plot_history(history, BOUNDS)
while not batch_optimizer.stop():
batch_optimizer.iteration += 1
batch_optimizer.iterate()
history = weird_append(history, batch_optimizer.positions)
plot_history(history, BOUNDS)
plot(batch_optimizer.positions)
|
Add 2d plot example for batch iteration optimizers
|
Add 2d plot example for batch iteration optimizers
|
Python
|
mit
|
tjanson/heuristic_optimization
|
Add 2d plot example for batch iteration optimizers
|
"""Visualizes batch ('population') optimizers as 2D plots."""
import numpy as np
from heuristic_optimization.optimizers import ParticleSwarmOptimizer
try:
import matplotlib.pyplot as plt
except ImportError:
print("This requires matplotlib (despite not being in the dependencies), but it was not found. Exiting.")
raise
def plot(points, bounds=None):
"""Plots points in 2D (scatter plot).
Any additional dimensions are ignored."""
plt.scatter(points[:, 0], points[:, 1])
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def plot_history(history, bounds=None):
"""Plots line along historic points."""
# shape of history: (n, m, o)
# n: number of particles
# m: dimensions of search space
# o: length of history
for i in range(len(history)):
plt.plot(history[i][0], history[i][1], 'o-')
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def weird_append(big_thing, slicey_thing):
# uh, yeah, this is just meant to append along the last axis
# not sure if there's a better way to do so. maybe relevant:
# https://stackoverflow.com/questions/8898471/concatenate-two-numpy-arrays-in-the-4th-dimension
return np.concatenate((big_thing, np.expand_dims(slicey_thing, -1)), -1)
if __name__ == '__main__':
BOUNDS = ([0, 0], [1, 1])
batch_optimizer = ParticleSwarmOptimizer(lambda A: ((A - 0.5) ** 2).sum(axis=1),
BOUNDS,
obj_fct_is_vectorized=True,
options={'num_particles': 15, 'max_iters': 10})
batch_optimizer.initialize()
history = np.expand_dims(np.copy(batch_optimizer.positions), -1)
plot_history(history, BOUNDS)
while not batch_optimizer.stop():
batch_optimizer.iteration += 1
batch_optimizer.iterate()
history = weird_append(history, batch_optimizer.positions)
plot_history(history, BOUNDS)
plot(batch_optimizer.positions)
|
<commit_before><commit_msg>Add 2d plot example for batch iteration optimizers<commit_after>
|
"""Visualizes batch ('population') optimizers as 2D plots."""
import numpy as np
from heuristic_optimization.optimizers import ParticleSwarmOptimizer
try:
import matplotlib.pyplot as plt
except ImportError:
print("This requires matplotlib (despite not being in the dependencies), but it was not found. Exiting.")
raise
def plot(points, bounds=None):
"""Plots points in 2D (scatter plot).
Any additional dimensions are ignored."""
plt.scatter(points[:, 0], points[:, 1])
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def plot_history(history, bounds=None):
"""Plots line along historic points."""
# shape of history: (n, m, o)
# n: number of particles
# m: dimensions of search space
# o: length of history
for i in range(len(history)):
plt.plot(history[i][0], history[i][1], 'o-')
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def weird_append(big_thing, slicey_thing):
# uh, yeah, this is just meant to append along the last axis
# not sure if there's a better way to do so. maybe relevant:
# https://stackoverflow.com/questions/8898471/concatenate-two-numpy-arrays-in-the-4th-dimension
return np.concatenate((big_thing, np.expand_dims(slicey_thing, -1)), -1)
if __name__ == '__main__':
BOUNDS = ([0, 0], [1, 1])
batch_optimizer = ParticleSwarmOptimizer(lambda A: ((A - 0.5) ** 2).sum(axis=1),
BOUNDS,
obj_fct_is_vectorized=True,
options={'num_particles': 15, 'max_iters': 10})
batch_optimizer.initialize()
history = np.expand_dims(np.copy(batch_optimizer.positions), -1)
plot_history(history, BOUNDS)
while not batch_optimizer.stop():
batch_optimizer.iteration += 1
batch_optimizer.iterate()
history = weird_append(history, batch_optimizer.positions)
plot_history(history, BOUNDS)
plot(batch_optimizer.positions)
|
Add 2d plot example for batch iteration optimizers"""Visualizes batch ('population') optimizers as 2D plots."""
import numpy as np
from heuristic_optimization.optimizers import ParticleSwarmOptimizer
try:
import matplotlib.pyplot as plt
except ImportError:
print("This requires matplotlib (despite not being in the dependencies), but it was not found. Exiting.")
raise
def plot(points, bounds=None):
"""Plots points in 2D (scatter plot).
Any additional dimensions are ignored."""
plt.scatter(points[:, 0], points[:, 1])
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def plot_history(history, bounds=None):
"""Plots line along historic points."""
# shape of history: (n, m, o)
# n: number of particles
# m: dimensions of search space
# o: length of history
for i in range(len(history)):
plt.plot(history[i][0], history[i][1], 'o-')
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def weird_append(big_thing, slicey_thing):
# uh, yeah, this is just meant to append along the last axis
# not sure if there's a better way to do so. maybe relevant:
# https://stackoverflow.com/questions/8898471/concatenate-two-numpy-arrays-in-the-4th-dimension
return np.concatenate((big_thing, np.expand_dims(slicey_thing, -1)), -1)
if __name__ == '__main__':
BOUNDS = ([0, 0], [1, 1])
batch_optimizer = ParticleSwarmOptimizer(lambda A: ((A - 0.5) ** 2).sum(axis=1),
BOUNDS,
obj_fct_is_vectorized=True,
options={'num_particles': 15, 'max_iters': 10})
batch_optimizer.initialize()
history = np.expand_dims(np.copy(batch_optimizer.positions), -1)
plot_history(history, BOUNDS)
while not batch_optimizer.stop():
batch_optimizer.iteration += 1
batch_optimizer.iterate()
history = weird_append(history, batch_optimizer.positions)
plot_history(history, BOUNDS)
plot(batch_optimizer.positions)
|
<commit_before><commit_msg>Add 2d plot example for batch iteration optimizers<commit_after>"""Visualizes batch ('population') optimizers as 2D plots."""
import numpy as np
from heuristic_optimization.optimizers import ParticleSwarmOptimizer
try:
import matplotlib.pyplot as plt
except ImportError:
print("This requires matplotlib (despite not being in the dependencies), but it was not found. Exiting.")
raise
def plot(points, bounds=None):
"""Plots points in 2D (scatter plot).
Any additional dimensions are ignored."""
plt.scatter(points[:, 0], points[:, 1])
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def plot_history(history, bounds=None):
"""Plots line along historic points."""
# shape of history: (n, m, o)
# n: number of particles
# m: dimensions of search space
# o: length of history
for i in range(len(history)):
plt.plot(history[i][0], history[i][1], 'o-')
if bounds is not None:
plt.xlim(bounds[0][0], bounds[1][0])
plt.ylim(bounds[0][1], bounds[1][1])
plt.show()
def weird_append(big_thing, slicey_thing):
# uh, yeah, this is just meant to append along the last axis
# not sure if there's a better way to do so. maybe relevant:
# https://stackoverflow.com/questions/8898471/concatenate-two-numpy-arrays-in-the-4th-dimension
return np.concatenate((big_thing, np.expand_dims(slicey_thing, -1)), -1)
if __name__ == '__main__':
BOUNDS = ([0, 0], [1, 1])
batch_optimizer = ParticleSwarmOptimizer(lambda A: ((A - 0.5) ** 2).sum(axis=1),
BOUNDS,
obj_fct_is_vectorized=True,
options={'num_particles': 15, 'max_iters': 10})
batch_optimizer.initialize()
history = np.expand_dims(np.copy(batch_optimizer.positions), -1)
plot_history(history, BOUNDS)
while not batch_optimizer.stop():
batch_optimizer.iteration += 1
batch_optimizer.iterate()
history = weird_append(history, batch_optimizer.positions)
plot_history(history, BOUNDS)
plot(batch_optimizer.positions)
|
|
c10ace67fcb209955647426ac776fd1821fb6f86
|
read_serial.py
|
read_serial.py
|
__author__ = 'miahi'
## Serial logger for APC Smart UPS
import serial
import csv
import time
import datetime
PORT = 'COM2'
BAUDRATE = 2400
SLEEP_SECONDS = 3
class APCSerial(object):
def __init__(self, port, baudrate=2400):
# todo: check that port exists & init errors
self.serial = serial.Serial(port, baudrate, timeout=1)
self.serial.write('Y')
mode = self.serial.readline()
# todo: test init in Smart mode (UPS returns 'SM')
def read_power(self):
return self._read_number('P')
def read_batt_voltage(self):
return self._read_number('B')
def read_temperature(self):
return self._read_number('C')
def read_frequency(self):
return self._read_number('F')
def read_line_voltage(self):
return self._read_number('L')
def read_max_line_voltage(self):
return self._read_number('M')
def read_min_line_voltage(self):
return self._read_number('N')
def read_output_voltage(self):
return self._read_number('O')
def read_batt_level(self):
return self._read_number('f')
def read_runtime(self):
self.serial.write('j')
response = self.serial.readline()
return int(float(response.rstrip().rstrip(':')))
def _read_number(self, command):
self.serial.write(command)
response = self.serial.readline()
return float(response.rstrip())
def main():
apcserial = APCSerial(PORT, BAUDRATE)
filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv'
with open(filename, 'a+b', buffering=1) as csvfile:
outwriter = csv.writer(csvfile, delimiter=',')
outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]',
'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]',
'Temperature[C]'])
while True:
outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(),
apcserial.read_batt_voltage(),
apcserial.read_line_voltage(), apcserial.read_max_line_voltage(),
apcserial.read_min_line_voltage(), apcserial.read_output_voltage(),
apcserial.read_frequency(),
apcserial.read_runtime(), apcserial.read_temperature()])
csvfile.flush()
time.sleep(SLEEP_SECONDS)
if __name__ == '__main__':
main()
|
Read from serial, write to CSV
|
Read from serial, write to CSV
|
Python
|
mit
|
miahi/python.apcserial
|
Read from serial, write to CSV
|
__author__ = 'miahi'
## Serial logger for APC Smart UPS
import serial
import csv
import time
import datetime
PORT = 'COM2'
BAUDRATE = 2400
SLEEP_SECONDS = 3
class APCSerial(object):
def __init__(self, port, baudrate=2400):
# todo: check that port exists & init errors
self.serial = serial.Serial(port, baudrate, timeout=1)
self.serial.write('Y')
mode = self.serial.readline()
# todo: test init in Smart mode (UPS returns 'SM')
def read_power(self):
return self._read_number('P')
def read_batt_voltage(self):
return self._read_number('B')
def read_temperature(self):
return self._read_number('C')
def read_frequency(self):
return self._read_number('F')
def read_line_voltage(self):
return self._read_number('L')
def read_max_line_voltage(self):
return self._read_number('M')
def read_min_line_voltage(self):
return self._read_number('N')
def read_output_voltage(self):
return self._read_number('O')
def read_batt_level(self):
return self._read_number('f')
def read_runtime(self):
self.serial.write('j')
response = self.serial.readline()
return int(float(response.rstrip().rstrip(':')))
def _read_number(self, command):
self.serial.write(command)
response = self.serial.readline()
return float(response.rstrip())
def main():
apcserial = APCSerial(PORT, BAUDRATE)
filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv'
with open(filename, 'a+b', buffering=1) as csvfile:
outwriter = csv.writer(csvfile, delimiter=',')
outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]',
'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]',
'Temperature[C]'])
while True:
outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(),
apcserial.read_batt_voltage(),
apcserial.read_line_voltage(), apcserial.read_max_line_voltage(),
apcserial.read_min_line_voltage(), apcserial.read_output_voltage(),
apcserial.read_frequency(),
apcserial.read_runtime(), apcserial.read_temperature()])
csvfile.flush()
time.sleep(SLEEP_SECONDS)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Read from serial, write to CSV<commit_after>
|
__author__ = 'miahi'
## Serial logger for APC Smart UPS
import serial
import csv
import time
import datetime
PORT = 'COM2'
BAUDRATE = 2400
SLEEP_SECONDS = 3
class APCSerial(object):
def __init__(self, port, baudrate=2400):
# todo: check that port exists & init errors
self.serial = serial.Serial(port, baudrate, timeout=1)
self.serial.write('Y')
mode = self.serial.readline()
# todo: test init in Smart mode (UPS returns 'SM')
def read_power(self):
return self._read_number('P')
def read_batt_voltage(self):
return self._read_number('B')
def read_temperature(self):
return self._read_number('C')
def read_frequency(self):
return self._read_number('F')
def read_line_voltage(self):
return self._read_number('L')
def read_max_line_voltage(self):
return self._read_number('M')
def read_min_line_voltage(self):
return self._read_number('N')
def read_output_voltage(self):
return self._read_number('O')
def read_batt_level(self):
return self._read_number('f')
def read_runtime(self):
self.serial.write('j')
response = self.serial.readline()
return int(float(response.rstrip().rstrip(':')))
def _read_number(self, command):
self.serial.write(command)
response = self.serial.readline()
return float(response.rstrip())
def main():
apcserial = APCSerial(PORT, BAUDRATE)
filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv'
with open(filename, 'a+b', buffering=1) as csvfile:
outwriter = csv.writer(csvfile, delimiter=',')
outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]',
'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]',
'Temperature[C]'])
while True:
outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(),
apcserial.read_batt_voltage(),
apcserial.read_line_voltage(), apcserial.read_max_line_voltage(),
apcserial.read_min_line_voltage(), apcserial.read_output_voltage(),
apcserial.read_frequency(),
apcserial.read_runtime(), apcserial.read_temperature()])
csvfile.flush()
time.sleep(SLEEP_SECONDS)
if __name__ == '__main__':
main()
|
Read from serial, write to CSV__author__ = 'miahi'
## Serial logger for APC Smart UPS
import serial
import csv
import time
import datetime
PORT = 'COM2'
BAUDRATE = 2400
SLEEP_SECONDS = 3
class APCSerial(object):
def __init__(self, port, baudrate=2400):
# todo: check that port exists & init errors
self.serial = serial.Serial(port, baudrate, timeout=1)
self.serial.write('Y')
mode = self.serial.readline()
# todo: test init in Smart mode (UPS returns 'SM')
def read_power(self):
return self._read_number('P')
def read_batt_voltage(self):
return self._read_number('B')
def read_temperature(self):
return self._read_number('C')
def read_frequency(self):
return self._read_number('F')
def read_line_voltage(self):
return self._read_number('L')
def read_max_line_voltage(self):
return self._read_number('M')
def read_min_line_voltage(self):
return self._read_number('N')
def read_output_voltage(self):
return self._read_number('O')
def read_batt_level(self):
return self._read_number('f')
def read_runtime(self):
self.serial.write('j')
response = self.serial.readline()
return int(float(response.rstrip().rstrip(':')))
def _read_number(self, command):
self.serial.write(command)
response = self.serial.readline()
return float(response.rstrip())
def main():
apcserial = APCSerial(PORT, BAUDRATE)
filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv'
with open(filename, 'a+b', buffering=1) as csvfile:
outwriter = csv.writer(csvfile, delimiter=',')
outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]',
'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]',
'Temperature[C]'])
while True:
outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(),
apcserial.read_batt_voltage(),
apcserial.read_line_voltage(), apcserial.read_max_line_voltage(),
apcserial.read_min_line_voltage(), apcserial.read_output_voltage(),
apcserial.read_frequency(),
apcserial.read_runtime(), apcserial.read_temperature()])
csvfile.flush()
time.sleep(SLEEP_SECONDS)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Read from serial, write to CSV<commit_after>__author__ = 'miahi'
## Serial logger for APC Smart UPS
import serial
import csv
import time
import datetime
PORT = 'COM2'
BAUDRATE = 2400
SLEEP_SECONDS = 3
class APCSerial(object):
def __init__(self, port, baudrate=2400):
# todo: check that port exists & init errors
self.serial = serial.Serial(port, baudrate, timeout=1)
self.serial.write('Y')
mode = self.serial.readline()
# todo: test init in Smart mode (UPS returns 'SM')
def read_power(self):
return self._read_number('P')
def read_batt_voltage(self):
return self._read_number('B')
def read_temperature(self):
return self._read_number('C')
def read_frequency(self):
return self._read_number('F')
def read_line_voltage(self):
return self._read_number('L')
def read_max_line_voltage(self):
return self._read_number('M')
def read_min_line_voltage(self):
return self._read_number('N')
def read_output_voltage(self):
return self._read_number('O')
def read_batt_level(self):
return self._read_number('f')
def read_runtime(self):
self.serial.write('j')
response = self.serial.readline()
return int(float(response.rstrip().rstrip(':')))
def _read_number(self, command):
self.serial.write(command)
response = self.serial.readline()
return float(response.rstrip())
def main():
apcserial = APCSerial(PORT, BAUDRATE)
filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv'
with open(filename, 'a+b', buffering=1) as csvfile:
outwriter = csv.writer(csvfile, delimiter=',')
outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]',
'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]',
'Temperature[C]'])
while True:
outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(),
apcserial.read_batt_voltage(),
apcserial.read_line_voltage(), apcserial.read_max_line_voltage(),
apcserial.read_min_line_voltage(), apcserial.read_output_voltage(),
apcserial.read_frequency(),
apcserial.read_runtime(), apcserial.read_temperature()])
csvfile.flush()
time.sleep(SLEEP_SECONDS)
if __name__ == '__main__':
main()
|
|
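read_runtime above strips a trailing ':' before converting, presumably because the UPS terminates that value with a colon, and main() flushes the CSV file after every row so a crash loses at most one sample. The parsing and row-writing steps can be checked without a serial port:

import csv
import io

def parse_runtime(response):
    # mirrors read_runtime(): drop trailing whitespace, drop a trailing ':', int()
    return int(float(response.rstrip().rstrip(':')))

assert parse_runtime('0042:\r\n') == 42

buf = io.StringIO()
writer = csv.writer(buf, delimiter=',')
writer.writerow(['Time', 'Power[%]', 'EstimatedRuntime[min]'])
writer.writerow(['2013-01-01 00:00:00', 52.4, 42])
print(buf.getvalue())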
dc136bb96455944436b56e39ce6929799036bfa8
|
neuroanalysis/stimuli.py
|
neuroanalysis/stimuli.py
|
import numpy as np
def square_pulses(trace, baseline=None):
"""Return a list of (start, stop, amp) tuples describing square pulses
in the stimulus.
A pulse is defined as any contiguous region of the stimulus waveform
that has a constant value other than the baseline. If no baseline is
specified, then the first sample in the stimulus is used.
Parameters
----------
trace : Trace instance
The stimulus command waveform. This data should be noise-free.
baseline : float | None
Specifies the value in the command waveform that is considered to be
"no pulse". If no baseline is specified, then the first sample of
*trace* is used.
"""
if baseline is None:
baseline = trace[0]
sdiff = np.diff(trace)
changes = np.argwhere(sdiff != 0)[:, 0] + 1
pulses = []
for i, start in enumerate(changes):
amp = trace[start]
if amp != baseline:
stop = changes[i+1] if (i+1 < len(changes)) else len(trace)
pulses.append((start, stop, amp))
return pulses
|
Add module for analyzing stimulus waveforms
|
Add module for analyzing stimulus waveforms
|
Python
|
mit
|
campagnola/neuroanalysis
|
Add module for analyzing stimulus waveforms
|
import numpy as np
def square_pulses(trace, baseline=None):
"""Return a list of (start, stop, amp) tuples describing square pulses
in the stimulus.
A pulse is defined as any contiguous region of the stimulus waveform
that has a constant value other than the baseline. If no baseline is
specified, then the first sample in the stimulus is used.
Parameters
----------
trace : Trace instance
The stimulus command waveform. This data should be noise-free.
baseline : float | None
Specifies the value in the command waveform that is considered to be
"no pulse". If no baseline is specified, then the first sample of
*trace* is used.
"""
if baseline is None:
baseline = trace[0]
sdiff = np.diff(trace)
changes = np.argwhere(sdiff != 0)[:, 0] + 1
pulses = []
for i, start in enumerate(changes):
amp = trace[start]
if amp != baseline:
stop = changes[i+1] if (i+1 < len(changes)) else len(trace)
pulses.append((start, stop, amp))
return pulses
|
<commit_before><commit_msg>Add module for analyzing stimulus waveforms<commit_after>
|
import numpy as np
def square_pulses(trace, baseline=None):
"""Return a list of (start, stop, amp) tuples describing square pulses
in the stimulus.
A pulse is defined as any contiguous region of the stimulus waveform
that has a constant value other than the baseline. If no baseline is
specified, then the first sample in the stimulus is used.
Parameters
----------
trace : Trace instance
The stimulus command waveform. This data should be noise-free.
baseline : float | None
Specifies the value in the command waveform that is considered to be
"no pulse". If no baseline is specified, then the first sample of
*trace* is used.
"""
if baseline is None:
baseline = trace[0]
sdiff = np.diff(trace)
changes = np.argwhere(sdiff != 0)[:, 0] + 1
pulses = []
for i, start in enumerate(changes):
amp = trace[start]
if amp != baseline:
stop = changes[i+1] if (i+1 < len(changes)) else len(trace)
pulses.append((start, stop, amp))
return pulses
|
Add module for analyzing stimulus waveformsimport numpy as np
def square_pulses(trace, baseline=None):
"""Return a list of (start, stop, amp) tuples describing square pulses
in the stimulus.
A pulse is defined as any contiguous region of the stimulus waveform
that has a constant value other than the baseline. If no baseline is
specified, then the first sample in the stimulus is used.
Parameters
----------
trace : Trace instance
The stimulus command waveform. This data should be noise-free.
baseline : float | None
Specifies the value in the command waveform that is considered to be
"no pulse". If no baseline is specified, then the first sample of
*trace* is used.
"""
if baseline is None:
baseline = trace[0]
sdiff = np.diff(trace)
changes = np.argwhere(sdiff != 0)[:, 0] + 1
pulses = []
for i, start in enumerate(changes):
amp = trace[start]
if amp != baseline:
stop = changes[i+1] if (i+1 < len(changes)) else len(trace)
pulses.append((start, stop, amp))
return pulses
|
<commit_before><commit_msg>Add module for analyzing stimulus waveforms<commit_after>import numpy as np
def square_pulses(trace, baseline=None):
"""Return a list of (start, stop, amp) tuples describing square pulses
in the stimulus.
A pulse is defined as any contiguous region of the stimulus waveform
that has a constant value other than the baseline. If no baseline is
    specified, then the first sample in the stimulus is used.
Parameters
----------
trace : Trace instance
The stimulus command waveform. This data should be noise-free.
baseline : float | None
Specifies the value in the command waveform that is considered to be
"no pulse". If no baseline is specified, then the first sample of
*trace* is used.
"""
if baseline is None:
baseline = trace[0]
sdiff = np.diff(trace)
changes = np.argwhere(sdiff != 0)[:, 0] + 1
pulses = []
for i, start in enumerate(changes):
amp = trace[start]
if amp != baseline:
stop = changes[i+1] if (i+1 < len(changes)) else len(trace)
pulses.append((start, stop, amp))
return pulses
|
|
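A quick sanity check of the pulse-detection logic above (a hypothetical driver, not part of the commit; square_pulses is assumed to be in scope):

import numpy as np

# Synthetic, noise-free trace: one positive and one negative square pulse.
trace = np.zeros(20)
trace[3:7] = 5.0     # amplitude 5.0 over samples 3..6
trace[12:15] = -2.0  # amplitude -2.0 over samples 12..14

# With the default baseline (trace[0] == 0.0) both regions are reported.
print(square_pulses(trace))  # expected: [(3, 7, 5.0), (12, 15, -2.0)]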
adf46f5b90d04ce8e26701810b6c23bc230ddc37
|
nova/conf/consoleauth.py
|
nova/conf/consoleauth.py
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
Add an additional description for 'token_ttl'
|
Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6
|
Python
|
apache-2.0
|
openstack/nova,klmitch/nova,klmitch/nova,mahak/nova,phenoxim/nova,gooddata/openstack-nova,mikalstill/nova,gooddata/openstack-nova,mahak/nova,rahulunair/nova,klmitch/nova,klmitch/nova,rahulunair/nova,mikalstill/nova,gooddata/openstack-nova,mikalstill/nova,rahulunair/nova,openstack/nova,phenoxim/nova,openstack/nova,mahak/nova,gooddata/openstack-nova
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
<commit_before># Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
<commit_msg>Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6<commit_after>
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6# Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
<commit_before># Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
<commit_msg>Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6<commit_after># Copyright (c) 2016 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
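A minimal consumption sketch for the option above (assumes a fresh oslo.config ConfigOpts instance; register_opts is the function from nova/conf/consoleauth.py):

from oslo_config import cfg

CONF = cfg.ConfigOpts()
register_opts(CONF)

# The value is in seconds, as the amended help text states; 600 is the
# default unless overridden in the [consoleauth] section of nova.conf.
print(CONF.consoleauth.token_ttl)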
a4a25587edbfef824b131c22f8fb6c17ccce6abe
|
openfda-1/get_drug.py
|
openfda-1/get_drug.py
|
import http.client
import json
headers = {'User-Agent': 'http-client'}
conn = http.client.HTTPSConnection("api.fda.gov")
# Get a drug label from https://api.fda.gov/drug/label.json and extract the id,
# the purpose of the drug and the manufacturer_name
conn.request("GET", "/drug/label.json", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)
drug = drugs['results'][0]
drug_id = drug['id']
drug_purpose = drug['purpose'][0]
drug_manufacturer_name = drug['openfda']['manufacturer_name'][0]
print(drug_id, drug_purpose, drug_manufacturer_name)
# Get 10 drugs and extract from all of them the id (tip: use the limit param for it)
conn.request("GET", "/drug/label.json?limit=10", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)['results']
for drug in drugs:
print(drug['id'])
|
Add openfda1 get drugs practice
|
Add openfda1 get drugs practice
|
Python
|
apache-2.0
|
acs-test/openfda,acs-test/openfda
|
Add openfda1 get drugs practice
|
import http.client
import json
headers = {'User-Agent': 'http-client'}
conn = http.client.HTTPSConnection("api.fda.gov")
# Get a drug label from https://api.fda.gov/drug/label.json and extract the id,
# the purpose of the drug and the manufacturer_name
conn.request("GET", "/drug/label.json", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)
drug = drugs['results'][0]
drug_id = drug['id']
drug_purpose = drug['purpose'][0]
drug_manufacturer_name = drug['openfda']['manufacturer_name'][0]
print(drug_id, drug_purpose, drug_manufacturer_name)
# Get 10 drugs and extract from all of them the id (tip: use the limit param for it)
conn.request("GET", "/drug/label.json?limit=10", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)['results']
for drug in drugs:
print(drug['id'])
|
<commit_before><commit_msg>Add openfda1 get drugs practice<commit_after>
|
import http.client
import json
headers = {'User-Agent': 'http-client'}
conn = http.client.HTTPSConnection("api.fda.gov")
# Get a drug label from https://api.fda.gov/drug/label.json and extract the id,
# the purpose of the drug and the manufacturer_name
conn.request("GET", "/drug/label.json", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)
drug = drugs['results'][0]
drug_id = drug['id']
drug_purpose = drug['purpose'][0]
drug_manufacturer_name = drug['openfda']['manufacturer_name'][0]
print(drug_id, drug_purpose, drug_manufacturer_name)
# Get 10 drugs and extract from all of them the id (tip: use the limit param for it)
conn.request("GET", "/drug/label.json?limit=10", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)['results']
for drug in drugs:
print(drug['id'])
|
Add openfda1 get drugs practiceimport http.client
import json
headers = {'User-Agent': 'http-client'}
conn = http.client.HTTPSConnection("api.fda.gov")
# Get a drug label from https://api.fda.gov/drug/label.json and extract the id,
# the purpose of the drug and the manufacturer_name
conn.request("GET", "/drug/label.json", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)
drug = drugs['results'][0]
drug_id = drug['id']
drug_purpose = drug['purpose'][0]
drug_manufacturer_name = drug['openfda']['manufacturer_name'][0]
print(drug_id, drug_purpose, drug_manufacturer_name)
# Get 10 drugs and extract from all of them the id (tip: use the limit param for it)
conn.request("GET", "/drug/label.json?limit=10", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)['results']
for drug in drugs:
print(drug['id'])
|
<commit_before><commit_msg>Add openfda1 get drugs practice<commit_after>import http.client
import json
headers = {'User-Agent': 'http-client'}
conn = http.client.HTTPSConnection("api.fda.gov")
# Get a drug label from https://api.fda.gov/drug/label.json and extract the id,
# the purpose of the drug and the manufacturer_name
conn.request("GET", "/drug/label.json", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)
drug = drugs['results'][0]
drug_id = drug['id']
drug_purpose = drug['purpose'][0]
drug_manufacturer_name = drug['openfda']['manufacturer_name'][0]
print(drug_id, drug_purpose, drug_manufacturer_name)
# Get 10 drugs and extract from all of them the id (tip: use the limit param for it)
conn.request("GET", "/drug/label.json?limit=10", None, headers)
r1 = conn.getresponse()
print(r1.status, r1.reason)
drugs_raw = r1.read().decode("utf-8")
conn.close()
drugs = json.loads(drugs_raw)['results']
for drug in drugs:
print(drug['id'])
|
|
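The openFDA API also accepts a skip parameter alongside limit, so the practice script above extends naturally to paging; this is a sketch with arbitrary page size and page count:

import http.client
import json

headers = {'User-Agent': 'http-client'}
conn = http.client.HTTPSConnection("api.fda.gov")
for page in range(3):
    # limit and skip are standard openFDA query parameters.
    conn.request("GET", "/drug/label.json?limit=10&skip=%d" % (page * 10),
                 None, headers)
    body = conn.getresponse().read().decode("utf-8")
    for drug in json.loads(body)['results']:
        print(drug['id'])
conn.close()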
64806d4b3a0fb277ffe07e9d9670c4e80563f56b
|
text_to_speech.py
|
text_to_speech.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Asks for coffee using different Python modules.
Requires macspeechX module:
* https://pypi.python.org/pypi/macspeechX/
* http://old.nabble.com/Re:-py-access-to-speech-synth--p18845607.html
"""
from macspeechX import SpeakString
import pyttsx
# Using macspeechX
# Note: sometimes doesn't work. It's a bit unpredictable.
# Seems to work in the Python interactive shell only.
SpeakString("Can you please make me some coffee, dearest?")
SpeakString("Please?")
SpeakString("Perché è questo caffè così amaro?")
# Using pyttsx
engine = pyttsx.init()
engine.say('Can you please get me some coffee?')
engine.say('Please?')
# Note: looks like it doesn't work with accented characters (yet)
engine.say("Perché è questo caffè così amaro?")
engine.runAndWait()
|
Add text to speech example.
|
Add text to speech example.
|
Python
|
bsd-3-clause
|
audreyr/useful
|
Add text to speech example.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Asks for coffee using different Python modules.
Requires macspeechX module:
* https://pypi.python.org/pypi/macspeechX/
* http://old.nabble.com/Re:-py-access-to-speech-synth--p18845607.html
"""
from macspeechX import SpeakString
import pyttsx
# Using macspeechX
# Note: sometimes doesn't work. It's a bit unpredictable.
# Seems to work in the Python interactive shell only.
SpeakString("Can you please make me some coffee, dearest?")
SpeakString("Please?")
SpeakString("Perché è questo caffè così amaro?")
# Using pyttsx
engine = pyttsx.init()
engine.say('Can you please get me some coffee?')
engine.say('Please?')
# Note: looks like it doesn't work with accented characters (yet)
engine.say("Perché è questo caffè così amaro?")
engine.runAndWait()
|
<commit_before><commit_msg>Add text to speech example.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Asks for coffee using different Python modules.
Requires macspeechX module:
* https://pypi.python.org/pypi/macspeechX/
* http://old.nabble.com/Re:-py-access-to-speech-synth--p18845607.html
"""
from macspeechX import SpeakString
import pyttsx
# Using macspeechX
# Note: sometimes doesn't work. It's a bit unpredictable.
# Seems to work in the Python interactive shell only.
SpeakString("Can you please make me some coffee, dearest?")
SpeakString("Please?")
SpeakString("Perché è questo caffè così amaro?")
# Using pyttsx
engine = pyttsx.init()
engine.say('Can you please get me some coffee?')
engine.say('Please?')
# Note: looks like it doesn't work with accented characters (yet)
engine.say("Perché è questo caffè così amaro?")
engine.runAndWait()
|
Add text to speech example.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Asks for coffee using different Python modules.
Requires macspeechX module:
* https://pypi.python.org/pypi/macspeechX/
* http://old.nabble.com/Re:-py-access-to-speech-synth--p18845607.html
"""
from macspeechX import SpeakString
import pyttsx
# Using macspeechX
# Note: sometimes doesn't work. It's a bit unpredictable.
# Seems to work in the Python interactive shell only.
SpeakString("Can you please make me some coffee, dearest?")
SpeakString("Please?")
SpeakString("Perché è questo caffè così amaro?")
# Using pyttsx
engine = pyttsx.init()
engine.say('Can you please get me some coffee?')
engine.say('Please?')
# Note: looks like it doesn't work with accented characters (yet)
engine.say("Perché è questo caffè così amaro?")
engine.runAndWait()
|
<commit_before><commit_msg>Add text to speech example.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Asks for coffee using different Python modules.
Requires macspeechX module:
* https://pypi.python.org/pypi/macspeechX/
* http://old.nabble.com/Re:-py-access-to-speech-synth--p18845607.html
"""
from macspeechX import SpeakString
import pyttsx
# Using macspeechX
# Note: sometimes doesn't work. It's a bit unpredictable.
# Seems to work in the Python interactive shell only.
SpeakString("Can you please make me some coffee, dearest?")
SpeakString("Please?")
SpeakString("Perché è questo caffè così amaro?")
# Using pyttsx
engine = pyttsx.init()
engine.say('Can you please get me some coffee?')
engine.say('Please?')
# Note: looks like it doesn't work with accented characters (yet)
engine.say("Perché è questo caffè così amaro?")
engine.runAndWait()
|
|
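pyttsx also exposes engine properties that the example above leaves at their defaults; a hedged sketch (standard pyttsx property names, values arbitrary):

import pyttsx

engine = pyttsx.init()
engine.setProperty('rate', 150)    # speaking rate in words per minute
engine.setProperty('volume', 0.9)  # volume in the range 0.0 .. 1.0
engine.say('Coffee, please.')
engine.runAndWait()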
118fc8570b15700a62e5cb5aa7c3d1bfe70c9dc6
|
Python/concurrency/furutes_prot.py
|
Python/concurrency/furutes_prot.py
|
# concurrent.futures - Launch parallel tasks
import concurrent.futures as ft
def main():
with ft.ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(fnc, 323, 4)
print(future.result())
#future = executor.map(fnc, (33, 22))
#print(future.result())
def fnc(x, y):
return x**y
if __name__ == '__main__':
main()
|
Add concurrent.futures to Python prototypes
|
Add concurrent.futures to Python prototypes
|
Python
|
apache-2.0
|
yuriyshapovalov/Prototypes,yuriyshapovalov/Prototypes,yuriyshapovalov/Prototypes
|
Add concurrent.futures to Python prototypes
|
# concurrent.futures - Launch parallel tasks
import concurrent.futures as ft
def main():
with ft.ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(fnc, 323, 4)
print(future.result())
#future = executor.map(fnc, (33, 22))
#print(future.result())
def fnc(x, y):
return x**y
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add concurrent.futures to Python prototypes<commit_after>
|
# concurrent.futures - Launch parallel tasks
import concurrent.futures as ft
def main():
with ft.ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(fnc, 323, 4)
print(future.result())
#future = executor.map(fnc, (33, 22))
#print(future.result())
def fnc(x, y):
return x**y
if __name__ == '__main__':
main()
|
Add concurrent.futures to Python prototypes# concurrent.futures - Launch parallel tasks
import concurrent.futures as ft
def main():
with ft.ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(fnc, 323, 4)
print(future.result())
#future = executor.map(fnc, (33, 22))
#print(future.result())
def fnc(x, y):
return x**y
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add concurrent.futures to Python prototypes<commit_after># concurrent.futures - Launch parallel tasks
import concurrent.futures as ft
def main():
with ft.ThreadPoolExecutor(max_workers=1) as executor:
future = executor.submit(fnc, 323, 4)
print(future.result())
#future = executor.map(fnc, (33, 22))
#print(future.result())
def fnc(x, y):
return x**y
if __name__ == '__main__':
main()
|
|
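The commented-out lines in the prototype above hint at executor.map; unlike submit, map returns an iterator of results rather than a Future, which is presumably why the result() call there was abandoned. A corrected sketch:

import concurrent.futures as ft

def fnc(x, y):
    return x ** y

with ft.ThreadPoolExecutor(max_workers=2) as executor:
    # map yields results directly; there is no .result() to call.
    for value in executor.map(fnc, (33, 22), (2, 3)):
        print(value)  # prints 33**2, then 22**3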
18e14ba67c245f2a9ec24b05d5c504452a1f299d
|
robo/test/test_json_dump.py
|
robo/test/test_json_dump.py
|
'''
Created on: June 5th, 2016
@author: Numair Mansur (numair.mansur@gmail.com)
'''
import unittest
class TestJsonMethods(unittest.TestCase):
def test_json_base_solver(self):
assert True
def test_json_base_model(self):
assert True
def test_json_base_task(self):
assert True
def test_json_base_acquisition(self):
assert True
|
Create Json unit test file
|
Create Json unit test file
|
Python
|
bsd-3-clause
|
aaronkl/RoBO,aaronkl/RoBO,aaronkl/RoBO,automl/RoBO,numairmansur/RoBO,automl/RoBO,numairmansur/RoBO
|
Create Json unit test file
|
'''
Created on: June 5th, 2016
@author: Numair Mansur (numair.mansur@gmail.com)
'''
import unittest
class TestJsonMethods(unittest.TestCase):
def test_json_base_solver(self):
assert True
def test_json_base_model(self):
assert True
def test_json_base_task(self):
assert True
def test_json_base_acquisition(self):
assert True
|
<commit_before><commit_msg>Create Json unit test file<commit_after>
|
'''
Created on: June 5th, 2016
@author: Numair Mansur (numair.mansur@gmail.com)
'''
import unittest
class TestJsonMethods(unittest.TestCase):
def test_json_base_solver(self):
assert True
def test_json_base_model(self):
assert True
def test_json_base_task(self):
assert True
def test_json_base_acquisition(self):
assert True
|
Create Json unit test file'''
Created on: June 5th, 2016
@author: Numair Mansur (numair.mansur@gmail.com)
'''
import unittest
class TestJsonMethods(unittest.TestCase):
def test_json_base_solver(self):
assert True
def test_json_base_model(self):
assert True
def test_json_base_task(self):
assert True
def test_json_base_acquisition(self):
assert True
|
<commit_before><commit_msg>Create Json unit test file<commit_after>'''
Created on: June 5th, 2016
@author: Numair Mansur (numair.mansur@gmail.com)
'''
import unittest
class TestJsonMethods(unittest.TestCase):
def test_json_base_solver(self):
assert True
def test_json_base_model(self):
assert True
def test_json_base_task(self):
assert True
def test_json_base_acquisition(self):
assert True
|
|
9d0f386d419097e68f1a2155333236e1c4ed3108
|
config/migrations/0003_auto_20181031_2340.py
|
config/migrations/0003_auto_20181031_2340.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-10-31 23:40
from __future__ import unicode_literals
from django.db import migrations
from config.restype import RESTYPE
def updatesdstorageaddress(apps, schema_editor):
ConfResource = apps.get_model('config', 'ConfResource')
applied = ConfResource.objects.filter(type_id=RESTYPE['SDAddresses']).count()
if applied == 0:
ConfComponent = apps.get_model('config', 'ConfComponent')
ConfParameter = apps.get_model('config', 'ConfParameter')
sdcompid = ConfComponent.objects.filter(type='S')
for sd in sdcompid:
sdresid = ConfResource.objects.filter(compid=sd, type__name='Storage')
for sdid in sdresid:
addrquery = ConfParameter.objects.filter(resid=sdid, name='SDAddress')
portquery = ConfParameter.objects.filter(resid=sdid, name='SDPort')
address = addrquery[0].value
addrquery.delete()
portquery.delete()
sdaddrsid = ConfResource(compid=sd, name='', sub=sdid.resid, type_id=RESTYPE['SDAddresses'], description='')
sdaddrsid.save()
ipsid = ConfResource(compid=sd, name='', sub=sdaddrsid.resid, type_id=RESTYPE['IP'], description='')
ipsid.save()
a = ConfParameter(resid=ipsid, name='Addr', value=address)
a.save()
p = ConfParameter(resid=ipsid, name='Port', value=9103)
p.save()
class Migration(migrations.Migration):
dependencies = [
('config', '0002_confrtype_equ'),
]
operations = [
migrations.RunPython(updatesdstorageaddress),
]
|
Add automatic SDAddress migration to new version.
|
Add automatic SDAddress migration to new version.
|
Python
|
agpl-3.0
|
inteos/IBAdmin,inteos/IBAdmin,inteos/IBAdmin,inteos/IBAdmin,inteos/IBAdmin,inteos/IBAdmin
|
Add automatic SDAddress migration to new version.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-10-31 23:40
from __future__ import unicode_literals
from django.db import migrations
from config.restype import RESTYPE
def updatesdstorageaddress(apps, schema_editor):
ConfResource = apps.get_model('config', 'ConfResource')
applied = ConfResource.objects.filter(type_id=RESTYPE['SDAddresses']).count()
if applied == 0:
ConfComponent = apps.get_model('config', 'ConfComponent')
ConfParameter = apps.get_model('config', 'ConfParameter')
sdcompid = ConfComponent.objects.filter(type='S')
for sd in sdcompid:
sdresid = ConfResource.objects.filter(compid=sd, type__name='Storage')
for sdid in sdresid:
addrquery = ConfParameter.objects.filter(resid=sdid, name='SDAddress')
portquery = ConfParameter.objects.filter(resid=sdid, name='SDPort')
address = addrquery[0].value
addrquery.delete()
portquery.delete()
sdaddrsid = ConfResource(compid=sd, name='', sub=sdid.resid, type_id=RESTYPE['SDAddresses'], description='')
sdaddrsid.save()
ipsid = ConfResource(compid=sd, name='', sub=sdaddrsid.resid, type_id=RESTYPE['IP'], description='')
ipsid.save()
a = ConfParameter(resid=ipsid, name='Addr', value=address)
a.save()
p = ConfParameter(resid=ipsid, name='Port', value=9103)
p.save()
class Migration(migrations.Migration):
dependencies = [
('config', '0002_confrtype_equ'),
]
operations = [
migrations.RunPython(updatesdstorageaddress),
]
|
<commit_before><commit_msg>Add automatic SDAddress migration to new version.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-10-31 23:40
from __future__ import unicode_literals
from django.db import migrations
from config.restype import RESTYPE
def updatesdstorageaddress(apps, schema_editor):
ConfResource = apps.get_model('config', 'ConfResource')
applied = ConfResource.objects.filter(type_id=RESTYPE['SDAddresses']).count()
if applied == 0:
ConfComponent = apps.get_model('config', 'ConfComponent')
ConfParameter = apps.get_model('config', 'ConfParameter')
sdcompid = ConfComponent.objects.filter(type='S')
for sd in sdcompid:
sdresid = ConfResource.objects.filter(compid=sd, type__name='Storage')
for sdid in sdresid:
addrquery = ConfParameter.objects.filter(resid=sdid, name='SDAddress')
portquery = ConfParameter.objects.filter(resid=sdid, name='SDPort')
address = addrquery[0].value
addrquery.delete()
portquery.delete()
sdaddrsid = ConfResource(compid=sd, name='', sub=sdid.resid, type_id=RESTYPE['SDAddresses'], description='')
sdaddrsid.save()
ipsid = ConfResource(compid=sd, name='', sub=sdaddrsid.resid, type_id=RESTYPE['IP'], description='')
ipsid.save()
a = ConfParameter(resid=ipsid, name='Addr', value=address)
a.save()
p = ConfParameter(resid=ipsid, name='Port', value=9103)
p.save()
class Migration(migrations.Migration):
dependencies = [
('config', '0002_confrtype_equ'),
]
operations = [
migrations.RunPython(updatesdstorageaddress),
]
|
Add automatic SDAddress migration to new version.# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-10-31 23:40
from __future__ import unicode_literals
from django.db import migrations
from config.restype import RESTYPE
def updatesdstorageaddress(apps, schema_editor):
ConfResource = apps.get_model('config', 'ConfResource')
applied = ConfResource.objects.filter(type_id=RESTYPE['SDAddresses']).count()
if applied == 0:
ConfComponent = apps.get_model('config', 'ConfComponent')
ConfParameter = apps.get_model('config', 'ConfParameter')
sdcompid = ConfComponent.objects.filter(type='S')
for sd in sdcompid:
sdresid = ConfResource.objects.filter(compid=sd, type__name='Storage')
for sdid in sdresid:
addrquery = ConfParameter.objects.filter(resid=sdid, name='SDAddress')
portquery = ConfParameter.objects.filter(resid=sdid, name='SDPort')
address = addrquery[0].value
addrquery.delete()
portquery.delete()
sdaddrsid = ConfResource(compid=sd, name='', sub=sdid.resid, type_id=RESTYPE['SDAddresses'], description='')
sdaddrsid.save()
ipsid = ConfResource(compid=sd, name='', sub=sdaddrsid.resid, type_id=RESTYPE['IP'], description='')
ipsid.save()
a = ConfParameter(resid=ipsid, name='Addr', value=address)
a.save()
p = ConfParameter(resid=ipsid, name='Port', value=9103)
p.save()
class Migration(migrations.Migration):
dependencies = [
('config', '0002_confrtype_equ'),
]
operations = [
migrations.RunPython(updatesdstorageaddress),
]
|
<commit_before><commit_msg>Add automatic SDAddress migration to new version.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-10-31 23:40
from __future__ import unicode_literals
from django.db import migrations
from config.restype import RESTYPE
def updatesdstorageaddress(apps, schema_editor):
ConfResource = apps.get_model('config', 'ConfResource')
applied = ConfResource.objects.filter(type_id=RESTYPE['SDAddresses']).count()
if applied == 0:
ConfComponent = apps.get_model('config', 'ConfComponent')
ConfParameter = apps.get_model('config', 'ConfParameter')
sdcompid = ConfComponent.objects.filter(type='S')
for sd in sdcompid:
sdresid = ConfResource.objects.filter(compid=sd, type__name='Storage')
for sdid in sdresid:
addrquery = ConfParameter.objects.filter(resid=sdid, name='SDAddress')
portquery = ConfParameter.objects.filter(resid=sdid, name='SDPort')
address = addrquery[0].value
addrquery.delete()
portquery.delete()
sdaddrsid = ConfResource(compid=sd, name='', sub=sdid.resid, type_id=RESTYPE['SDAddresses'], description='')
sdaddrsid.save()
ipsid = ConfResource(compid=sd, name='', sub=sdaddrsid.resid, type_id=RESTYPE['IP'], description='')
ipsid.save()
a = ConfParameter(resid=ipsid, name='Addr', value=address)
a.save()
p = ConfParameter(resid=ipsid, name='Port', value=9103)
p.save()
class Migration(migrations.Migration):
dependencies = [
('config', '0002_confrtype_equ'),
]
operations = [
migrations.RunPython(updatesdstorageaddress),
]
|
|
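As written, the migration above cannot be unapplied; Django's usual pattern (a sketch, not from the IBAdmin repo) is to pass a no-op reverse function so migrating backwards does not fail:

    operations = [
        migrations.RunPython(updatesdstorageaddress,
                             reverse_code=migrations.RunPython.noop),
    ]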
4fe2cd2bbb83fff577bab7811a5ba5ba634d9146
|
towel/managers.py
|
towel/managers.py
|
import re
from django.db import models
from django.db.models import Q
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
    ''' Splits the query string into individual keywords, getting rid of unnecessary spaces
and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
'''
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
class SearchManager(models.Manager):
search_fields = ()
def _search(self, query):
queryset = self.get_query_set()
if not query or not self.search_fields:
return queryset
for keyword in normalize_query(query):
negate = False
if len(keyword)>1:
if keyword[0] == '-':
keyword = keyword[1:]
negate = True
elif keyword[0] == '+':
keyword = keyword[1:]
if negate:
q = reduce(lambda p, q: p&q,
(~Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
else:
q = reduce(lambda p, q: p|q,
(Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
queryset = queryset.filter(q)
return queryset
|
Add search manager from metronom
|
Add search manager from metronom
|
Python
|
bsd-3-clause
|
matthiask/towel,matthiask/towel,matthiask/towel,matthiask/towel
|
Add search manager from metronom
|
import re
from django.db import models
from django.db.models import Q
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
    ''' Splits the query string into individual keywords, getting rid of unnecessary spaces
and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
'''
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
class SearchManager(models.Manager):
search_fields = ()
def _search(self, query):
queryset = self.get_query_set()
if not query or not self.search_fields:
return queryset
for keyword in normalize_query(query):
negate = False
if len(keyword)>1:
if keyword[0] == '-':
keyword = keyword[1:]
negate = True
elif keyword[0] == '+':
keyword = keyword[1:]
if negate:
q = reduce(lambda p, q: p&q,
(~Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
else:
q = reduce(lambda p, q: p|q,
(Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
queryset = queryset.filter(q)
return queryset
|
<commit_before><commit_msg>Add search manager from metronom<commit_after>
|
import re
from django.db import models
from django.db.models import Q
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
    ''' Splits the query string into individual keywords, getting rid of unnecessary spaces
and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
'''
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
class SearchManager(models.Manager):
search_fields = ()
def _search(self, query):
queryset = self.get_query_set()
if not query or not self.search_fields:
return queryset
for keyword in normalize_query(query):
negate = False
if len(keyword)>1:
if keyword[0] == '-':
keyword = keyword[1:]
negate = True
elif keyword[0] == '+':
keyword = keyword[1:]
if negate:
q = reduce(lambda p, q: p&q,
(~Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
else:
q = reduce(lambda p, q: p|q,
(Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
queryset = queryset.filter(q)
return queryset
|
Add search manager from metronomimport re
from django.db import models
from django.db.models import Q
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
    ''' Splits the query string into individual keywords, getting rid of unnecessary spaces
and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
'''
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
class SearchManager(models.Manager):
search_fields = ()
def _search(self, query):
queryset = self.get_query_set()
if not query or not self.search_fields:
return queryset
for keyword in normalize_query(query):
negate = False
if len(keyword)>1:
if keyword[0] == '-':
keyword = keyword[1:]
negate = True
elif keyword[0] == '+':
keyword = keyword[1:]
if negate:
q = reduce(lambda p, q: p&q,
(~Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
else:
q = reduce(lambda p, q: p|q,
(Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
queryset = queryset.filter(q)
return queryset
|
<commit_before><commit_msg>Add search manager from metronom<commit_after>import re
from django.db import models
from django.db.models import Q
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
    ''' Splits the query string into individual keywords, getting rid of unnecessary spaces
and grouping quoted words together.
Example:
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
'''
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
class SearchManager(models.Manager):
search_fields = ()
def _search(self, query):
queryset = self.get_query_set()
if not query or not self.search_fields:
return queryset
for keyword in normalize_query(query):
negate = False
if len(keyword)>1:
if keyword[0] == '-':
keyword = keyword[1:]
negate = True
elif keyword[0] == '+':
keyword = keyword[1:]
if negate:
q = reduce(lambda p, q: p&q,
(~Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
else:
q = reduce(lambda p, q: p|q,
(Q(**{'%s__icontains' % field: keyword}) for field in self.search_fields),
Q())
queryset = queryset.filter(q)
return queryset
|
|
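A hedged usage sketch for the manager above; the model and field names are illustrative, not from the towel codebase (SearchManager is assumed importable from towel.managers):

from django.db import models
from towel.managers import SearchManager

class CustomerManager(SearchManager):
    search_fields = ('name', 'city')

class Customer(models.Model):
    name = models.CharField(max_length=100)
    city = models.CharField(max_length=100)

    objects = CustomerManager()

# A leading '-' negates a keyword and quoted phrases stay together:
# Customer.objects._search('meier -zurich "main street"')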
449c48db23a76ec1d2f04045d111d968b6df469e
|
thinc/api.py
|
thinc/api.py
|
def layerize(begin_update=None, *args, **kwargs):
'''Wrap a function into a layer'''
if begin_update is not None:
return FunctionLayer(begin_update, *args, **kwargs)
def wrapper(begin_update):
return FunctionLayer(begin_update, *args, **kwargs)
return wrapper
def metalayerize(user_func):
def returned(layers):
forward, backward = split_backward(layers)
def begin_update(X, *args, **kwargs):
for func in forward:
X = func(X)
def finish_update(grad, *args, **kwargs):
for bwd in backward:
grad = bwd(grad, *args, **kwargs)
return grad
            return X, finish_update
        return FunctionLayer(begin_update)
return returned
def multiroute(output, activity, shapes, funcs):
for i, (slice_, func) in enumerate(zip(shapes, funcs)):
output[slice_] += func(output[slice_])
return output
def sink_return(func, sink, splitter):
def wrap(*args, **kwargs):
output = func(*args, **kwargs)
        keep, extra = splitter(*output)
        sink(extra)
return keep
return wrap
def split_backward(layers):
backward = []
forward = [steal_callback(op.begin_update, backward.append)
for op in layers]
return forward, backward
class FunctionLayer(object):
def __init__(self, begin_update, predict_batch=None, predict_one=None,
                 nr_in=None, nr_out=None,
**kwargs):
self.begin_update = begin_update
self.predict_batch = predict_batch
self.predict_one = predict_one
self.nr_in = nr_in
self.nr_out = nr_out
def __call__(self, X):
if isinstance(X, Minibatch):
return self.predict_batch(X)
else:
return self.predict_one(X)
|
Work on parts of functional API
|
Work on parts of functional API
|
Python
|
mit
|
explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
Work on parts of functional API
|
def layerize(begin_update=None, *args, **kwargs):
'''Wrap a function into a layer'''
if begin_update is not None:
return FunctionLayer(begin_update, *args, **kwargs)
def wrapper(begin_update):
return FunctionLayer(begin_update, *args, **kwargs)
return wrapper
def metalayerize(user_func):
def returned(layers):
forward, backward = split_backward(layers)
def begin_update(X, *args, **kwargs):
for func in forward:
X = func(X)
def finish_update(grad, *args, **kwargs):
for bwd in backward:
grad = bwd(grad, *args, **kwargs)
return grad
            return X, finish_update
        return FunctionLayer(begin_update)
return returned
def multiroute(output, activity, shapes, funcs):
for i, (slice_, func) in enumerate(zip(shapes, funcs)):
output[slice_] += func(output[slice_])
return output
def sink_return(func, sink, splitter):
def wrap(*args, **kwargs):
output = func(*args, **kwargs)
        keep, extra = splitter(*output)
        sink(extra)
return keep
return wrap
def split_backward(layers):
backward = []
forward = [steal_callback(op.begin_update, backward.append)
for op in layers]
return forward, backward
class FunctionLayer(object):
def __init__(self, begin_update, predict_batch=None, predict_one=None,
                 nr_in=None, nr_out=None,
**kwargs):
self.begin_update = begin_update
self.predict_batch = predict_batch
self.predict_one = predict_one
self.nr_in = nr_in
self.nr_out = nr_out
def __call__(self, X):
if isinstance(X, Minibatch):
return self.predict_batch(X)
else:
return self.predict_one(X)
|
<commit_before><commit_msg>Work on parts of functional API<commit_after>
|
def layerize(begin_update=None, *args, **kwargs):
'''Wrap a function into a layer'''
if begin_update is not None:
return FunctionLayer(begin_update, *args, **kwargs)
def wrapper(begin_update):
return FunctionLayer(begin_update, *args, **kwargs)
return wrapper
def metalayerize(user_func):
def returned(layers):
forward, backward = split_backward(layers)
def begin_update(X, *args, **kwargs):
for func in forward:
X = func(X)
def finish_update(grad, *args, **kwargs):
for bwd in backward:
grad = bwd(grad, *args, **kwargs)
return grad
            return X, finish_update
        return FunctionLayer(begin_update)
return returned
def multiroute(output, activity, shapes, funcs):
for i, (slice_, func) in enumerate(zip(shapes, funcs)):
output[slice_] += func(output[slice_])
return output
def sink_return(func, sink, splitter):
def wrap(*args, **kwargs):
output = func(*args, **kwargs)
        keep, extra = splitter(*output)
        sink(extra)
return keep
return wrap
def split_backward(layers):
backward = []
forward = [steal_callback(op.begin_update, backward.append)
for op in layers]
return forward, backward
class FunctionLayer(object):
def __init__(self, begin_update, predict_batch=None, predict_one=None,
                 nr_in=None, nr_out=None,
**kwargs):
self.begin_update = begin_update
self.predict_batch = predict_batch
self.predict_one = predict_one
self.nr_in = nr_in
self.nr_out = nr_out
def __call__(self, X):
if isinstance(X, Minibatch):
return self.predict_batch(X)
else:
return self.predict_one(X)
|
Work on parts of functional APIdef layerize(begin_update=None, *args, **kwargs):
'''Wrap a function into a layer'''
if begin_update is not None:
return FunctionLayer(begin_update, *args, **kwargs)
def wrapper(begin_update):
return FunctionLayer(begin_update, *args, **kwargs)
return wrapper
def metalayerize(user_func):
def returned(layers):
forward, backward = split_backward(layers)
def begin_update(X, *args, **kwargs):
for func in forward:
X = func(X)
def finish_update(grad, *args, **kwargs):
for bwd in backward:
grad = bwd(grad, *args, **kwargs)
return grad
            return X, finish_update
        return FunctionLayer(begin_update)
return returned
def multiroute(output, activity, shapes, funcs):
for i, (slice_, func) in enumerate(zip(shapes, funcs)):
output[slice_] += func(output[slice_])
return output
def sink_return(func, sink, splitter):
def wrap(*args, **kwargs):
output = func(*args, **kwargs)
        keep, extra = splitter(*output)
        sink(extra)
return keep
return wrap
def split_backward(layers):
backward = []
forward = [steal_callback(op.begin_update, backward.append)
for op in layers]
return forward, backward
class FunctionLayer(object):
def __init__(self, begin_update, predict_batch=None, predict_one=None,
                 nr_in=None, nr_out=None,
**kwargs):
self.begin_update = begin_update
self.predict_batch = predict_batch
self.predict_one = predict_one
self.nr_in = nr_in
self.nr_out = nr_out
def __call__(self, X):
if isinstance(X, Minibatch):
return self.predict_batch(X)
else:
return self.predict_one(X)
|
<commit_before><commit_msg>Work on parts of functional API<commit_after>def layerize(begin_update=None, *args, **kwargs):
'''Wrap a function into a layer'''
if begin_update is not None:
return FunctionLayer(begin_update, *args, **kwargs)
def wrapper(begin_update):
return FunctionLayer(begin_update, *args, **kwargs)
return wrapper
def metalayerize(user_func):
def returned(layers):
forward, backward = split_backward(layers)
def begin_update(X, *args, **kwargs):
for func in forward:
X = func(X)
def finish_update(grad, *args, **kwargs):
for bwd in backward:
grad = bwd(grad, *args, **kwargs)
return grad
            return X, finish_update
        return FunctionLayer(begin_update)
return returned
def multiroute(output, activity, shapes, funcs):
for i, (slice_, func) in enumerate(zip(shapes, funcs)):
output[slice_] += func(output[slice_])
return output
def sink_return(func, sink, splitter):
def wrap(*args, **kwargs):
output = func(*args, **kwargs)
        keep, extra = splitter(*output)
        sink(extra)
return keep
return wrap
def split_backward(layers):
backward = []
forward = [steal_callback(op.begin_update, backward.append)
for op in layers]
return forward, backward
class FunctionLayer(object):
def __init__(self, begin_update, predict_batch=None, predict_one=None,
                 nr_in=None, nr_out=None,
**kwargs):
self.begin_update = begin_update
self.predict_batch = predict_batch
self.predict_one = predict_one
self.nr_in = nr_in
self.nr_out = nr_out
def __call__(self, X):
if isinstance(X, Minibatch):
return self.predict_batch(X)
else:
return self.predict_one(X)
|
|
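With the signature and shadowing fixes applied to the WIP code above, layerize can at least be exercised; this is a speculative sketch of the intended calling convention, which the commit itself does not show:

# Hypothetical layer: doubles its input and scales gradients by two.
def double_begin_update(X, *args, **kwargs):
    def finish_update(grad, *args, **kwargs):
        return grad * 2
    return X * 2, finish_update

layer = layerize(double_begin_update)
output, backprop = layer.begin_update(3)
print(output, backprop(1.0))  # 6 and 2.0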
9f4905d7e3f42002209d8ce46435d3b9447de588
|
zerver/migrations/0261_pregistrationuser_clear_invited_as_admin.py
|
zerver/migrations/0261_pregistrationuser_clear_invited_as_admin.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2020-06-16 22:26
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_preregistrationuser_invited_as_admin(
apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""This migration fixes any PreregistrationUser objects that might
have been already corrupted to have the administrator role by the
buggy original version of migration
0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are
rare, it is cleaner to just remove the role from all
PreregistrationUser objects than to filter for just those older
invitation objects that could have been corrupted by the original
migration, which would have been possible using the
django_migrations table to check the date when the buggy migration
was run.
"""
INVITED_AS_MEMBER = 1
INVITED_AS_REALM_ADMIN = 2
PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
PreregistrationUser.objects.filter(
invited_as=INVITED_AS_REALM_ADMIN).update(
invited_as=INVITED_AS_MEMBER)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0260_missed_message_addresses_from_redis_to_db'),
]
operations = [
migrations.RunPython(
clear_preregistrationuser_invited_as_admin,
reverse_code=migrations.RunPython.noop
),
]
|
Add migration to clear INVITED_AS_REALM_ADMIN.
|
CVE-2020-14215: Add migration to clear INVITED_AS_REALM_ADMIN.
This migration fixes any PreregistrationUser objects that might have
been already corrupted to have the administrator role by the buggy
original version of migration 0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are rare, it
is cleaner to just remove the role from all PreregistrationUser
objects than to filter for just those older invitation objects that
could have been corrupted by the original migration.
|
Python
|
apache-2.0
|
hackerkid/zulip,punchagan/zulip,shubhamdhama/zulip,shubhamdhama/zulip,shubhamdhama/zulip,rht/zulip,shubhamdhama/zulip,kou/zulip,brainwane/zulip,brainwane/zulip,hackerkid/zulip,brainwane/zulip,showell/zulip,punchagan/zulip,timabbott/zulip,rht/zulip,kou/zulip,zulip/zulip,zulip/zulip,kou/zulip,brainwane/zulip,showell/zulip,andersk/zulip,timabbott/zulip,zulip/zulip,showell/zulip,shubhamdhama/zulip,eeshangarg/zulip,rht/zulip,punchagan/zulip,hackerkid/zulip,synicalsyntax/zulip,punchagan/zulip,shubhamdhama/zulip,kou/zulip,eeshangarg/zulip,synicalsyntax/zulip,andersk/zulip,timabbott/zulip,synicalsyntax/zulip,eeshangarg/zulip,punchagan/zulip,eeshangarg/zulip,rht/zulip,andersk/zulip,synicalsyntax/zulip,kou/zulip,showell/zulip,hackerkid/zulip,eeshangarg/zulip,punchagan/zulip,shubhamdhama/zulip,andersk/zulip,synicalsyntax/zulip,brainwane/zulip,rht/zulip,hackerkid/zulip,timabbott/zulip,showell/zulip,timabbott/zulip,hackerkid/zulip,punchagan/zulip,zulip/zulip,rht/zulip,rht/zulip,andersk/zulip,brainwane/zulip,showell/zulip,timabbott/zulip,eeshangarg/zulip,synicalsyntax/zulip,brainwane/zulip,zulip/zulip,zulip/zulip,timabbott/zulip,kou/zulip,showell/zulip,zulip/zulip,eeshangarg/zulip,kou/zulip,synicalsyntax/zulip,andersk/zulip,andersk/zulip,hackerkid/zulip
|
CVE-2020-14215: Add migration to clear INVITED_AS_REALM_ADMIN.
This migration fixes any PreregistrationUser objects that might have
been already corrupted to have the administrator role by the buggy
original version of migration 0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are rare, it
is cleaner to just remove the role from all PreregistrationUser
objects than to filter for just those older invitation objects that
could have been corrupted by the original migration.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2020-06-16 22:26
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_preregistrationuser_invited_as_admin(
apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""This migration fixes any PreregistrationUser objects that might
have been already corrupted to have the administrator role by the
buggy original version of migration
0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are
rare, it is cleaner to just remove the role from all
PreregistrationUser objects than to filter for just those older
invitation objects that could have been corrupted by the original
migration, which would have been possible using the
django_migrations table to check the date when the buggy migration
was run.
"""
INVITED_AS_MEMBER = 1
INVITED_AS_REALM_ADMIN = 2
PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
PreregistrationUser.objects.filter(
invited_as=INVITED_AS_REALM_ADMIN).update(
invited_as=INVITED_AS_MEMBER)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0260_missed_message_addresses_from_redis_to_db'),
]
operations = [
migrations.RunPython(
clear_preregistrationuser_invited_as_admin,
reverse_code=migrations.RunPython.noop
),
]
|
<commit_before><commit_msg>CVE-2020-14215: Add migration to clear INVITED_AS_REALM_ADMIN.
This migration fixes any PreregistrationUser objects that might have
been already corrupted to have the administrator role by the buggy
original version of migration 0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are rare, it
is cleaner to just remove the role from all PreregistrationUser
objects than to filter for just those older invitation objects that
could have been corrupted by the original migration.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2020-06-16 22:26
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_preregistrationuser_invited_as_admin(
apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""This migration fixes any PreregistrationUser objects that might
have been already corrupted to have the administrator role by the
buggy original version of migration
0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are
rare, it is cleaner to just remove the role from all
PreregistrationUser objects than to filter for just those older
invitation objects that could have been corrupted by the original
migration, which would have been possible using the
django_migrations table to check the date when the buggy migration
was run.
"""
INVITED_AS_MEMBER = 1
INVITED_AS_REALM_ADMIN = 2
PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
PreregistrationUser.objects.filter(
invited_as=INVITED_AS_REALM_ADMIN).update(
invited_as=INVITED_AS_MEMBER)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0260_missed_message_addresses_from_redis_to_db'),
]
operations = [
migrations.RunPython(
clear_preregistrationuser_invited_as_admin,
reverse_code=migrations.RunPython.noop
),
]
|
CVE-2020-14215: Add migration to clear INVITED_AS_REALM_ADMIN.
This migration fixes any PreregistrationUser objects that might have
been already corrupted to have the administrator role by the buggy
original version of migration 0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are rare, it
is cleaner to just remove the role from all PreregistrationUser
objects than to filter for just those older invitation objects that
could have been corrupted by the original migration.# -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2020-06-16 22:26
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_preregistrationuser_invited_as_admin(
apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""This migration fixes any PreregistrationUser objects that might
have been already corrupted to have the administrator role by the
buggy original version of migration
0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are
rare, it is cleaner to just remove the role from all
PreregistrationUser objects than to filter for just those older
invitation objects that could have been corrupted by the original
migration, which would have been possible using the
django_migrations table to check the date when the buggy migration
was run.
"""
INVITED_AS_MEMBER = 1
INVITED_AS_REALM_ADMIN = 2
PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
PreregistrationUser.objects.filter(
invited_as=INVITED_AS_REALM_ADMIN).update(
invited_as=INVITED_AS_MEMBER)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0260_missed_message_addresses_from_redis_to_db'),
]
operations = [
migrations.RunPython(
clear_preregistrationuser_invited_as_admin,
reverse_code=migrations.RunPython.noop
),
]
|
<commit_before><commit_msg>CVE-2020-14215: Add migration to clear INVITED_AS_REALM_ADMIN.
This migration fixes any PreregistrationUser objects that might have
been already corrupted to have the administrator role by the buggy
original version of migration 0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are rare, it
is cleaner to just remove the role from all PreregistrationUser
objects than to filter for just those older invitation objects that
could have been corrupted by the original migration.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.26 on 2020-06-16 22:26
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_preregistrationuser_invited_as_admin(
apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""This migration fixes any PreregistrationUser objects that might
have been already corrupted to have the administrator role by the
buggy original version of migration
0198_preregistrationuser_invited_as.
Since invitations that create new users as administrators are
rare, it is cleaner to just remove the role from all
PreregistrationUser objects than to filter for just those older
invitation objects that could have been corrupted by the original
migration, which would have been possible using the
django_migrations table to check the date when the buggy migration
was run.
"""
INVITED_AS_MEMBER = 1
INVITED_AS_REALM_ADMIN = 2
PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
PreregistrationUser.objects.filter(
invited_as=INVITED_AS_REALM_ADMIN).update(
invited_as=INVITED_AS_MEMBER)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0260_missed_message_addresses_from_redis_to_db'),
]
operations = [
migrations.RunPython(
clear_preregistrationuser_invited_as_admin,
reverse_code=migrations.RunPython.noop
),
]
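A quick post-deploy sanity check falls out of the migration above. The following is a hypothetical snippet for a Django shell session, not part of the commit; the model path mirrors the apps.get_model call, and INVITED_AS_REALM_ADMIN = 2 is assumed to still hold in the running code.
# Hypothetical check, run via ./manage.py shell after migrating.
from zerver.models import PreregistrationUser
INVITED_AS_REALM_ADMIN = 2  # the value the migration clears (assumed unchanged)
# After the data fix, no pending invitation should still carry the admin role.
assert not PreregistrationUser.objects.filter(
    invited_as=INVITED_AS_REALM_ADMIN).exists()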
|
|
edcde8ed3562e19b7bde43632965c2902a8e7f25
|
troposphere/sns.py
|
troposphere/sns.py
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere,ikben/troposphere,ikben/troposphere
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
Add Tags to SNS::Topic per 2019-11-31 changes
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
<commit_msg>Add Tags to SNS::Topic per 2019-11-31 changes<commit_after>
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
Add Tags to SNS::Topic per 2019-11-31 changes# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
<commit_msg>Add Tags to SNS::Topic per 2019-11-31 changes<commit_after># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
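With the new property in place, tagging a topic reads like any other Tags-bearing troposphere resource. A minimal usage sketch; the logical resource name and tag keys are illustrative, not from the commit:
from troposphere import Tags, Template
from troposphere.sns import Topic
template = Template()
template.add_resource(Topic(
    "AlertsTopic",  # illustrative logical resource name
    DisplayName="alerts",
    Tags=Tags(Environment="prod", Team="platform"),
))
print(template.to_json())  # Tags render alongside DisplayName in the output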
|
f2bbd5bd1eca7906d7d09bc595abd67af02b23ea
|
utils/sleepTest.py
|
utils/sleepTest.py
|
#!/usr/bin/python
"""Puts sensor to sleep and waits for it to wakeup"""
import minimalmodbus
import serial
from time import sleep
from sys import exit
ADDRESS = 1
SLEEPTIME = 33
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
#minimalmodbus.TIMEOUT=0.5
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
#sensor.debug=True
print("Reading from sensor...", sensor.read_register(0, functioncode=4))
print("Going to sleep for " + str(SLEEPTIME) + " seconds")
sensor.write_register(4, value=SLEEPTIME, functioncode=6)
sleep(0.01)
sleptSeconds = 0
while True:
try:
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
print("Sleep done", sensor.read_register(0, functioncode=4), sensor.read_register(1, functioncode=4, numberOfDecimals=1, signed=True))
if sleptSeconds < SLEEPTIME:
raise Exception("Slept too little!!!")
exit()
except (IOError, ValueError):
sleep(1)
sleptSeconds += 1
print("Slept " + str(sleptSeconds) + " seconds")
if sleptSeconds > SLEEPTIME:
raise Exception("Sleeping for too long!!!")
|
Add sleep functionality testing code
|
Add sleep functionality testing code
|
Python
|
apache-2.0
|
Miceuz/rs485-moist-sensor,Miceuz/rs485-moist-sensor
|
Add sleep functionality testing code
|
#!/usr/bin/python
"""Puts sensor to sleep and waits for it to wakeup"""
import minimalmodbus
import serial
from time import sleep
from sys import exit
ADDRESS = 1
SLEEPTIME = 33
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
#minimalmodbus.TIMEOUT=0.5
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
#sensor.debug=True
print("Reading from sensor...", sensor.read_register(0, functioncode=4))
print("Going to sleep for " + str(SLEEPTIME) + " seconds")
sensor.write_register(4, value=SLEEPTIME, functioncode=6)
sleep(0.01)
sleptSeconds = 0
while True:
try:
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
print("Sleep done", sensor.read_register(0, functioncode=4), sensor.read_register(1, functioncode=4, numberOfDecimals=1, signed=True))
if sleptSeconds < SLEEPTIME:
raise Exception("Slept too little!!!")
exit()
except (IOError, ValueError):
sleep(1)
sleptSeconds += 1
print("Slept " + str(sleptSeconds) + " seconds")
if sleptSeconds > SLEEPTIME:
raise Exception("Sleeping for too long!!!")
|
<commit_before><commit_msg>Add sleep functionality testing code<commit_after>
|
#!/usr/bin/python
"""Puts sensor to sleep and waits for it to wakeup"""
import minimalmodbus
import serial
from time import sleep
from sys import exit
ADDRESS = 1
SLEEPTIME = 33
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
#minimalmodbus.TIMEOUT=0.5
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
#sensor.debug=True
print("Reading from sensor...", sensor.read_register(0, functioncode=4))
print("Going to sleep for " + str(SLEEPTIME) + " seconds")
sensor.write_register(4, value=SLEEPTIME, functioncode=6)
sleep(0.01)
sleptSeconds = 0
while True:
try:
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
print("Sleep done", sensor.read_register(0, functioncode=4), sensor.read_register(1, functioncode=4, numberOfDecimals=1, signed=True))
if sleptSeconds < SLEEPTIME:
raise Exception("Slept too little!!!")
exit()
except (IOError, ValueError):
sleep(1)
sleptSeconds += 1
print("Slept " + str(sleptSeconds) + " seconds")
if sleptSeconds > SLEEPTIME:
raise Exception("Sleeping for too long!!!")
|
Add sleep functionality testing code#!/usr/bin/python
"""Puts sensor to sleep and waits for it to wakeup"""
import minimalmodbus
import serial
from time import sleep
from sys import exit
ADDRESS = 1
SLEEPTIME = 33
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
#minimalmodbus.TIMEOUT=0.5
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
#sensor.debug=True
print("Reading from sensor...", sensor.read_register(0, functioncode=4))
print("Going to sleep for " + str(SLEEPTIME) + " seconds")
sensor.write_register(4, value=SLEEPTIME, functioncode=6)
sleep(0.01)
sleptSeconds = 0
while True:
try:
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
print("Sleep done", sensor.read_register(0, functioncode=4), sensor.read_register(1, functioncode=4, numberOfDecimals=1, signed=True))
if sleptSeconds < SLEEPTIME:
raise Exception("Slept too little!!!")
exit()
except (IOError, ValueError):
sleep(1)
sleptSeconds += 1
print("Slept " + str(sleptSeconds) + " seconds")
if sleptSeconds > SLEEPTIME:
raise Exception("Sleeping for too long!!!")
|
<commit_before><commit_msg>Add sleep functionality testing code<commit_after>#!/usr/bin/python
"""Puts sensor to sleep and waits for it to wakeup"""
import minimalmodbus
import serial
from time import sleep
from sys import exit
ADDRESS = 1
SLEEPTIME = 33
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
#minimalmodbus.TIMEOUT=0.5
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
#sensor.debug=True
print("Reading from sensor...", sensor.read_register(0, functioncode=4))
print("Going to sleep for " + str(SLEEPTIME) + " seconds")
sensor.write_register(4, value=SLEEPTIME, functioncode=6)
sleep(0.01)
sleptSeconds = 0
while True:
try:
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS)
print("Sleep done", sensor.read_register(0, functioncode=4), sensor.read_register(1, functioncode=4, numberOfDecimals=1, signed=True))
if sleptSeconds < SLEEPTIME:
raise Exception("Slept too little!!!")
exit()
except (IOError, ValueError):
sleep(1)
sleptSeconds += 1
print("Slept " + str(sleptSeconds) + " seconds")
if sleptSeconds > SLEEPTIME:
raise Exception("Sleeping for too long!!!")
|
|
346188fe171b98aca87f3e5f96d6e41b1eb46fd1
|
tests/test_types.py
|
tests/test_types.py
|
from crosscompute.exceptions import DataTypeError
from crosscompute.types import DataType
from pytest import raises
class ADataType(DataType):
@classmethod
def load(Class, path):
if path == 'x':
raise Exception
instance = Class()
instance.path = path
return instance
@classmethod
def parse(Class, x, default_value=None):
if x == 'd':
raise DataTypeError
if x == 'e':
raise Exception
return 'a'
class BDataType(DataType):
@classmethod
def load(Class, path, default_value=None):
instance = Class()
instance.path = path
instance.default_value = default_value
return instance
class CDataType(DataType):
@classmethod
def load_for_view(Class, path):
instance = Class()
instance.path = path
return instance
class TestDataType(object):
def test_load_for_view_safely(self):
x = ADataType.load_for_view_safely('a')
assert x.path == 'a'
x = ADataType.load_for_view_safely('x')
assert x is None
x = BDataType.load_for_view_safely('b', 'bb')
assert x.path == 'b'
assert x.default_value == 'bb'
x = CDataType.load_for_view_safely('c')
assert x.path == 'c'
def test_parse_safely(self):
assert ADataType.parse_safely(None) is None
        assert ADataType.parse_safely(1) == 'a'  # compare by value; 'is' only passed via string interning
with raises(DataTypeError):
ADataType.parse_safely('d')
assert ADataType.parse_safely('e') == 'e'
|
Add tests for load_for_view_safely and parse_safely
|
Add tests for load_for_view_safely and parse_safely
|
Python
|
mit
|
crosscompute/crosscompute,crosscompute/crosscompute,crosscompute/crosscompute,crosscompute/crosscompute
|
Add tests for load_for_view_safely and parse_safely
|
from crosscompute.exceptions import DataTypeError
from crosscompute.types import DataType
from pytest import raises
class ADataType(DataType):
@classmethod
def load(Class, path):
if path == 'x':
raise Exception
instance = Class()
instance.path = path
return instance
@classmethod
def parse(Class, x, default_value=None):
if x == 'd':
raise DataTypeError
if x == 'e':
raise Exception
return 'a'
class BDataType(DataType):
@classmethod
def load(Class, path, default_value=None):
instance = Class()
instance.path = path
instance.default_value = default_value
return instance
class CDataType(DataType):
@classmethod
def load_for_view(Class, path):
instance = Class()
instance.path = path
return instance
class TestDataType(object):
def test_load_for_view_safely(self):
x = ADataType.load_for_view_safely('a')
assert x.path == 'a'
x = ADataType.load_for_view_safely('x')
assert x is None
x = BDataType.load_for_view_safely('b', 'bb')
assert x.path == 'b'
assert x.default_value == 'bb'
x = CDataType.load_for_view_safely('c')
assert x.path == 'c'
def test_parse_safely(self):
assert ADataType.parse_safely(None) is None
        assert ADataType.parse_safely(1) == 'a'  # compare by value; 'is' only passed via string interning
with raises(DataTypeError):
ADataType.parse_safely('d')
assert ADataType.parse_safely('e') == 'e'
|
<commit_before><commit_msg>Add tests for load_for_view_safely and parse_safely<commit_after>
|
from crosscompute.exceptions import DataTypeError
from crosscompute.types import DataType
from pytest import raises
class ADataType(DataType):
@classmethod
def load(Class, path):
if path == 'x':
raise Exception
instance = Class()
instance.path = path
return instance
@classmethod
def parse(Class, x, default_value=None):
if x == 'd':
raise DataTypeError
if x == 'e':
raise Exception
return 'a'
class BDataType(DataType):
@classmethod
def load(Class, path, default_value=None):
instance = Class()
instance.path = path
instance.default_value = default_value
return instance
class CDataType(DataType):
@classmethod
def load_for_view(Class, path):
instance = Class()
instance.path = path
return instance
class TestDataType(object):
def test_load_for_view_safely(self):
x = ADataType.load_for_view_safely('a')
assert x.path == 'a'
x = ADataType.load_for_view_safely('x')
assert x is None
x = BDataType.load_for_view_safely('b', 'bb')
assert x.path == 'b'
assert x.default_value == 'bb'
x = CDataType.load_for_view_safely('c')
assert x.path == 'c'
def test_parse_safely(self):
assert ADataType.parse_safely(None) is None
        assert ADataType.parse_safely(1) == 'a'  # compare by value; 'is' only passed via string interning
with raises(DataTypeError):
ADataType.parse_safely('d')
assert ADataType.parse_safely('e') == 'e'
|
Add tests for load_for_view_safely and parse_safelyfrom crosscompute.exceptions import DataTypeError
from crosscompute.types import DataType
from pytest import raises
class ADataType(DataType):
@classmethod
def load(Class, path):
if path == 'x':
raise Exception
instance = Class()
instance.path = path
return instance
@classmethod
def parse(Class, x, default_value=None):
if x == 'd':
raise DataTypeError
if x == 'e':
raise Exception
return 'a'
class BDataType(DataType):
@classmethod
def load(Class, path, default_value=None):
instance = Class()
instance.path = path
instance.default_value = default_value
return instance
class CDataType(DataType):
@classmethod
def load_for_view(Class, path):
instance = Class()
instance.path = path
return instance
class TestDataType(object):
def test_load_for_view_safely(self):
x = ADataType.load_for_view_safely('a')
assert x.path == 'a'
x = ADataType.load_for_view_safely('x')
assert x is None
x = BDataType.load_for_view_safely('b', 'bb')
assert x.path == 'b'
assert x.default_value == 'bb'
x = CDataType.load_for_view_safely('c')
assert x.path == 'c'
def test_parse_safely(self):
assert ADataType.parse_safely(None) is None
        assert ADataType.parse_safely(1) == 'a'  # compare by value; 'is' only passed via string interning
with raises(DataTypeError):
ADataType.parse_safely('d')
assert ADataType.parse_safely('e') == 'e'
|
<commit_before><commit_msg>Add tests for load_for_view_safely and parse_safely<commit_after>from crosscompute.exceptions import DataTypeError
from crosscompute.types import DataType
from pytest import raises
class ADataType(DataType):
@classmethod
def load(Class, path):
if path == 'x':
raise Exception
instance = Class()
instance.path = path
return instance
@classmethod
def parse(Class, x, default_value=None):
if x == 'd':
raise DataTypeError
if x == 'e':
raise Exception
return 'a'
class BDataType(DataType):
@classmethod
def load(Class, path, default_value=None):
instance = Class()
instance.path = path
instance.default_value = default_value
return instance
class CDataType(DataType):
@classmethod
def load_for_view(Class, path):
instance = Class()
instance.path = path
return instance
class TestDataType(object):
def test_load_for_view_safely(self):
x = ADataType.load_for_view_safely('a')
assert x.path == 'a'
x = ADataType.load_for_view_safely('x')
assert x is None
x = BDataType.load_for_view_safely('b', 'bb')
assert x.path == 'b'
assert x.default_value == 'bb'
x = CDataType.load_for_view_safely('c')
assert x.path == 'c'
def test_parse_safely(self):
assert ADataType.parse_safely(None) is None
assert ADataType.parse_safely(1) is 'a'
with raises(DataTypeError):
ADataType.parse_safely('d')
assert ADataType.parse_safely('e') == 'e'
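Read together, the parse assertions pin down a contract: None passes straight through, ordinary values are delegated to parse, DataTypeError propagates, and any other failure falls back to the raw input. A hedged reimplementation sketch consistent with the tests; the actual crosscompute source may differ:
from crosscompute.exceptions import DataTypeError
class SafeParseSketch(object):
    # parse() is expected on subclasses, as in the test doubles above.
    @classmethod
    def parse_safely(Class, x, default_value=None):
        if x is None:
            return None  # the None case short-circuits
        try:
            return Class.parse(x, default_value)
        except DataTypeError:
            raise  # validation errors propagate (the 'd' case)
        except Exception:
            return x  # unexpected failures return the raw input (the 'e' case)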
|
|
f605c6918f0868fc46a15c08650e5406453451c5
|
tools/key-update.py
|
tools/key-update.py
|
#!/usr/bin/env python
import os
import sys
from optparse import OptionParser
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir,
'anvil',
'__init__.py')):
sys.path.insert(0, possible_topdir)
from anvil import cfg
from anvil import cfg_helpers
from anvil import passwords
from anvil import utils
from anvil.helpers import initializers
import yaml
def get_config():
config = cfg.ProxyConfig()
config.add_read_resolver(cfg.EnvResolver())
config.add_read_resolver(cfg.ConfigResolver(cfg.IgnoreMissingConfigParser(fns=cfg_helpers.find_config())))
config.add_password_resolver(passwords.ConfigPassword(config))
config.add_password_resolver(passwords.InputPassword(config))
config.add_password_resolver(passwords.RandomPassword(config))
return config
def setup_logging(level):
if level == 1:
logging.setupLogging(logging.INFO)
else:
logging.setupLogging(logging.DEBUG)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--file",
action="store",
dest="yaml_fn",
metavar="FILE",
help=("yaml file that contains your new roles/endpoints/services..."))
parser.add_option("-v", "--verbose",
action="append_const",
const=1,
dest="verbosity",
default=[1],
help="increase the verbose level")
(options, args) = parser.parse_args()
if not options.yaml_fn:
parser.error("File option required")
yaml_data = None
with open(options.yaml_fn, "r") as fh:
yaml_data = yaml.load(fh)
setup_logging(len(options.verbosity))
utils.welcome(prog_name="Keystone updater tool")
cfg = get_config()
|
Add a helper tool for updating keystone
|
Add a helper tool for updating keystone
|
Python
|
apache-2.0
|
stackforge/anvil,mc2014/anvil,stackforge/anvil,mc2014/anvil
|
Add a helper tool for updating keystone
|
#!/usr/bin/env python
import os
import sys
from optparse import OptionParser
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir,
'anvil',
'__init__.py')):
sys.path.insert(0, possible_topdir)
from anvil import cfg
from anvil import cfg_helpers
from anvil import log as logging  # assumed: anvil's log helper supplying setupLogging() and levels used below
from anvil import passwords
from anvil import utils
from anvil.helpers import initializers
import yaml
def get_config():
config = cfg.ProxyConfig()
config.add_read_resolver(cfg.EnvResolver())
config.add_read_resolver(cfg.ConfigResolver(cfg.IgnoreMissingConfigParser(fns=cfg_helpers.find_config())))
config.add_password_resolver(passwords.ConfigPassword(config))
config.add_password_resolver(passwords.InputPassword(config))
config.add_password_resolver(passwords.RandomPassword(config))
return config
def setup_logging(level):
if level == 1:
logging.setupLogging(logging.INFO)
else:
logging.setupLogging(logging.DEBUG)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--file",
action="store",
dest="yaml_fn",
metavar="FILE",
help=("yaml file that contains your new roles/endpoints/services..."))
parser.add_option("-v", "--verbose",
action="append_const",
const=1,
dest="verbosity",
default=[1],
help="increase the verbose level")
(options, args) = parser.parse_args()
if not options.yaml_fn:
parser.error("File option required")
yaml_data = None
with open(options.yaml_fn, "r") as fh:
yaml_data = yaml.load(fh)
setup_logging(len(options.verbosity))
utils.welcome(prog_name="Keystone updater tool")
cfg = get_config()
|
<commit_before><commit_msg>Add a helper tool for updating keystone<commit_after>
|
#!/usr/bin/env python
import os
import sys
from optparse import OptionParser
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir,
'anvil',
'__init__.py')):
sys.path.insert(0, possible_topdir)
from anvil import cfg
from anvil import cfg_helpers
from anvil import log as logging  # assumed: anvil's log helper supplying setupLogging() and levels used below
from anvil import passwords
from anvil import utils
from anvil.helpers import initializers
import yaml
def get_config():
config = cfg.ProxyConfig()
config.add_read_resolver(cfg.EnvResolver())
config.add_read_resolver(cfg.ConfigResolver(cfg.IgnoreMissingConfigParser(fns=cfg_helpers.find_config())))
config.add_password_resolver(passwords.ConfigPassword(config))
config.add_password_resolver(passwords.InputPassword(config))
config.add_password_resolver(passwords.RandomPassword(config))
return config
def setup_logging(level):
if level == 1:
logging.setupLogging(logging.INFO)
else:
logging.setupLogging(logging.DEBUG)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--file",
action="store",
dest="yaml_fn",
metavar="FILE",
help=("yaml file that contains your new roles/endpoints/services..."))
parser.add_option("-v", "--verbose",
action="append_const",
const=1,
dest="verbosity",
default=[1],
help="increase the verbose level")
(options, args) = parser.parse_args()
if not options.yaml_fn:
parser.error("File option required")
yaml_data = None
with open(options.yaml_fn, "r") as fh:
yaml_data = yaml.load(fh)
setup_logging(len(options.verbosity))
utils.welcome(prog_name="Keystone updater tool")
cfg = get_config()
|
Add a helper tool for updating keystone#!/usr/bin/env python
import os
import sys
from optparse import OptionParser
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir,
'anvil',
'__init__.py')):
sys.path.insert(0, possible_topdir)
from anvil import cfg
from anvil import cfg_helpers
from anvil import log as logging  # assumed: anvil's log helper supplying setupLogging() and levels used below
from anvil import passwords
from anvil import utils
from anvil.helpers import initializers
import yaml
def get_config():
config = cfg.ProxyConfig()
config.add_read_resolver(cfg.EnvResolver())
config.add_read_resolver(cfg.ConfigResolver(cfg.IgnoreMissingConfigParser(fns=cfg_helpers.find_config())))
config.add_password_resolver(passwords.ConfigPassword(config))
config.add_password_resolver(passwords.InputPassword(config))
config.add_password_resolver(passwords.RandomPassword(config))
return config
def setup_logging(level):
if level == 1:
logging.setupLogging(logging.INFO)
else:
logging.setupLogging(logging.DEBUG)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--file",
action="store",
dest="yaml_fn",
metavar="FILE",
help=("yaml file that contains your new roles/endpoints/services..."))
parser.add_option("-v", "--verbose",
action="append_const",
const=1,
dest="verbosity",
default=[1],
help="increase the verbose level")
(options, args) = parser.parse_args()
if not options.yaml_fn:
parser.error("File option required")
yaml_data = None
with open(options.yaml_fn, "r") as fh:
yaml_data = yaml.load(fh)
setup_logging(len(options.verbosity))
utils.welcome(prog_name="Keystone updater tool")
cfg = get_config()
|
<commit_before><commit_msg>Add a helper tool for updating keystone<commit_after>#!/usr/bin/env python
import os
import sys
from optparse import OptionParser
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir,
'anvil',
'__init__.py')):
sys.path.insert(0, possible_topdir)
from anvil import cfg
from anvil import cfg_helpers
from anvil import log as logging  # assumed: anvil's log helper supplying setupLogging() and levels used below
from anvil import passwords
from anvil import utils
from anvil.helpers import initializers
import yaml
def get_config():
config = cfg.ProxyConfig()
config.add_read_resolver(cfg.EnvResolver())
config.add_read_resolver(cfg.ConfigResolver(cfg.IgnoreMissingConfigParser(fns=cfg_helpers.find_config())))
config.add_password_resolver(passwords.ConfigPassword(config))
config.add_password_resolver(passwords.InputPassword(config))
config.add_password_resolver(passwords.RandomPassword(config))
return config
def setup_logging(level):
if level == 1:
logging.setupLogging(logging.INFO)
else:
logging.setupLogging(logging.DEBUG)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-f", "--file",
action="store",
dest="yaml_fn",
metavar="FILE",
help=("yaml file that contains your new roles/endpoints/services..."))
parser.add_option("-v", "--verbose",
action="append_const",
const=1,
dest="verbosity",
default=[1],
help="increase the verbose level")
(options, args) = parser.parse_args()
if not options.yaml_fn:
parser.error("File option required")
yaml_data = None
with open(options.yaml_fn, "r") as fh:
yaml_data = yaml.load(fh)
setup_logging(len(options.verbosity))
utils.welcome(prog_name="Keystone updater tool")
cfg = get_config()
|
|
a452eedc5f5a7e5f59776ba66a18f57c72bca3c4
|
tests/test_jit.py
|
tests/test_jit.py
|
import afnumpy
from asserts import *
def test_conditionals():
a = afnumpy.arange(10, dtype="float32") - 5.
b = afnumpy.ones((10), dtype="float32")
afnumpy.arrayfire.backend.get().af_eval(a.d_array.arr)
a_mask = a < 0.
a_sum = a_mask.sum()
a -= b
assert(a_sum == a_mask.sum())
|
Add a file to test JIT related issues
|
Add a file to test JIT related issues
|
Python
|
bsd-2-clause
|
FilipeMaia/afnumpy,daurer/afnumpy
|
Add a file to test JIT related issues
|
import afnumpy
from asserts import *
def test_conditionals():
a = afnumpy.arange(10, dtype="float32") - 5.
b = afnumpy.ones((10), dtype="float32")
afnumpy.arrayfire.backend.get().af_eval(a.d_array.arr)
a_mask = a < 0.
a_sum = a_mask.sum()
a -= b
assert(a_sum == a_mask.sum())
|
<commit_before><commit_msg>Add a file to test JIT related issues<commit_after>
|
import afnumpy
from asserts import *
def test_conditionals():
a = afnumpy.arange(10, dtype="float32") - 5.
b = afnumpy.ones((10), dtype="float32")
afnumpy.arrayfire.backend.get().af_eval(a.d_array.arr)
a_mask = a < 0.
a_sum = a_mask.sum()
a -= b
assert(a_sum == a_mask.sum())
|
Add a file to test JIT related issuesimport afnumpy
from asserts import *
def test_conditionals():
a = afnumpy.arange(10, dtype="float32") - 5.
b = afnumpy.ones((10), dtype="float32")
afnumpy.arrayfire.backend.get().af_eval(a.d_array.arr)
a_mask = a < 0.
a_sum = a_mask.sum()
a -= b
assert(a_sum == a_mask.sum())
|
<commit_before><commit_msg>Add a file to test JIT related issues<commit_after>import afnumpy
from asserts import *
def test_conditionals():
a = afnumpy.arange(10, dtype="float32") - 5.
b = afnumpy.ones((10), dtype="float32")
afnumpy.arrayfire.backend.get().af_eval(a.d_array.arr)
a_mask = a < 0.
a_sum = a_mask.sum()
a -= b
assert(a_sum == a_mask.sum())
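The assertion guards a lazy-evaluation (JIT) hazard: if a_mask stayed an unevaluated expression referencing a, the in-place a -= b would retroactively change the mask's sum. The same invariant rendered in plain NumPy, which evaluates eagerly, for contrast; no arrayfire required:
import numpy as np
a = np.arange(10, dtype="float32") - 5.
b = np.ones(10, dtype="float32")
mask = a < 0.  # eager evaluation: the mask is a fixed snapshot
before = mask.sum()
a -= b  # mutating a must not disturb the snapshot
assert before == mask.sum()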
|
|
3bcafade7e9a611d073a2baf3d66f46caee9b4aa
|
flexget/plugins/operate/feed_priority.py
|
flexget/plugins/operate/feed_priority.py
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
def on_process_start(self, task, config):
task.priority = config
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
# Currently the manager reads this value directly out of the config when the 'execute' command is run, and this plugin
# does nothing but make the config key valid.
# In daemon mode, schedules should be made which run tasks in the proper order instead of using this.
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
|
Add some notes on current state of priority plugin
|
Add some notes on current state of priority plugin
|
Python
|
mit
|
X-dark/Flexget,ianstalk/Flexget,JorisDeRieck/Flexget,Pretagonist/Flexget,ratoaq2/Flexget,X-dark/Flexget,qk4l/Flexget,tsnoam/Flexget,drwyrm/Flexget,crawln45/Flexget,cvium/Flexget,jawilson/Flexget,ratoaq2/Flexget,v17al/Flexget,grrr2/Flexget,patsissons/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,tarzasai/Flexget,qvazzler/Flexget,ratoaq2/Flexget,Flexget/Flexget,v17al/Flexget,lildadou/Flexget,thalamus/Flexget,thalamus/Flexget,OmgOhnoes/Flexget,offbyone/Flexget,jawilson/Flexget,sean797/Flexget,antivirtel/Flexget,qk4l/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,tarzasai/Flexget,Pretagonist/Flexget,crawln45/Flexget,ZefQ/Flexget,ianstalk/Flexget,LynxyssCZ/Flexget,ibrahimkarahan/Flexget,dsemi/Flexget,LynxyssCZ/Flexget,vfrc2/Flexget,cvium/Flexget,tobinjt/Flexget,tsnoam/Flexget,dsemi/Flexget,gazpachoking/Flexget,thalamus/Flexget,sean797/Flexget,patsissons/Flexget,voriux/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,oxc/Flexget,dsemi/Flexget,tsnoam/Flexget,qvazzler/Flexget,Pretagonist/Flexget,jacobmetrick/Flexget,Flexget/Flexget,lildadou/Flexget,voriux/Flexget,antivirtel/Flexget,camon/Flexget,asm0dey/Flexget,cvium/Flexget,OmgOhnoes/Flexget,patsissons/Flexget,vfrc2/Flexget,Flexget/Flexget,oxc/Flexget,poulpito/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,poulpito/Flexget,spencerjanssen/Flexget,spencerjanssen/Flexget,antivirtel/Flexget,malkavi/Flexget,ibrahimkarahan/Flexget,drwyrm/Flexget,drwyrm/Flexget,Danfocus/Flexget,malkavi/Flexget,camon/Flexget,jacobmetrick/Flexget,Danfocus/Flexget,qvazzler/Flexget,xfouloux/Flexget,asm0dey/Flexget,tobinjt/Flexget,crawln45/Flexget,jawilson/Flexget,X-dark/Flexget,tarzasai/Flexget,Flexget/Flexget,offbyone/Flexget,tobinjt/Flexget,ianstalk/Flexget,vfrc2/Flexget,asm0dey/Flexget,jacobmetrick/Flexget,tvcsantos/Flexget,grrr2/Flexget,xfouloux/Flexget,tvcsantos/Flexget,v17al/Flexget,grrr2/Flexget,xfouloux/Flexget,ZefQ/Flexget,poulpito/Flexget,sean797/Flexget,Danfocus/Flexget,spencerjanssen/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,ZefQ/Flexget,jawilson/Flexget,offbyone/Flexget,oxc/Flexget,ibrahimkarahan/Flexget,malkavi/Flexget,gazpachoking/Flexget
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
def on_process_start(self, task, config):
task.priority = config
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
Add some notes on current state of priority plugin
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
# Currently the manager reads this value directly out of the config when the 'execute' command is run, and this plugin
# does nothing but make the config key valid.
# In daemon mode, schedules should be made which run tasks in the proper order instead of using this.
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
|
<commit_before>from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
def on_process_start(self, task, config):
task.priority = config
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
<commit_msg>Add some notes on current state of priority plugin<commit_after>
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
# Currently the manager reads this value directly out of the config when the 'execute' command is run, and this plugin
# does nothing but make the config key valid.
# In daemon mode, schedules should be made which run tasks in the proper order instead of using this.
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
def on_process_start(self, task, config):
task.priority = config
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
Add some notes on current state of priority pluginfrom __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
# Currently the manager reads this value directly out of the config when the 'execute' command is run, and this plugin
# does nothing but make the config key valid.
# In daemon mode, schedules should be made which run tasks in the proper order instead of using this.
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
|
<commit_before>from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
def on_process_start(self, task, config):
task.priority = config
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
<commit_msg>Add some notes on current state of priority plugin<commit_after>from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('priority')
# TODO: 1.2 figure out replacement for this
# Currently the manager reads this value directly out of the config when the 'execute' command is run, and this plugin
# does nothing but make the config key valid.
# In daemon mode, schedules should be made which run tasks in the proper order instead of using this.
class TaskPriority(object):
"""Set task priorities"""
schema = {'type': 'integer'}
@event('plugin.register')
def register_plugin():
plugin.register(TaskPriority, 'priority', api_ver=2)
|
743ae5270d2ba24da652110a967f15b5fa526e3d
|
plugins/plugin_nginx_error.py
|
plugins/plugin_nginx_error.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import re
from manager import Plugin
class NginxError(Plugin):
def __init__(self, **kwargs):
self.keywords = ['nginx', 'error']
self.total_line = 0
self.level_dict = {"error": 0, "notice": 0, "info": 0}
self.client_dict = {}
def process(self, **kwargs):
"""docstring for process"""
self.total_line += 1
self.level_dict[kwargs['level']] += 1
message = kwargs['message']
m = re.match(".*client: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*", message)
if m:
if m.group(1) in self.client_dict:
self.client_dict[m.group(1)] += 1
else:
self.client_dict[m.group(1)] = 1
def report(self, **kwargs):
"""docstring for report"""
print "== Nginx Error =="
print "Nginx total error line: %d" % self.total_line
for level in self.level_dict.keys():
print "%s: %d" % (level, self.level_dict[level])
for client in self.client_dict.keys():
print "%s: %d" % (client, self.client_dict[client])
|
Add plugin for parse nginx error
|
Add plugin for parse nginx error
|
Python
|
apache-2.0
|
keepzero/fluent-mongo-parser
|
Add plugin for parse nginx error
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import re
from manager import Plugin
class NginxError(Plugin):
def __init__(self, **kwargs):
self.keywords = ['nginx', 'error']
self.total_line = 0
self.level_dict = {"error": 0, "notice": 0, "info": 0}
self.client_dict = {}
def process(self, **kwargs):
"""docstring for process"""
self.total_line += 1
self.level_dict[kwargs['level']] += 1
message = kwargs['message']
m = re.match(".*client: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*", message)
if m:
if m.group(1) in self.client_dict:
self.client_dict[m.group(1)] += 1
else:
self.client_dict[m.group(1)] = 1
def report(self, **kwargs):
"""docstring for report"""
print "== Nginx Error =="
print "Nginx total error line: %d" % self.total_line
for level in self.level_dict.keys():
print "%s: %d" % (level, self.level_dict[level])
for client in self.client_dict.keys():
print "%s: %d" % (client, self.client_dict[client])
|
<commit_before><commit_msg>Add plugin for parse nginx error<commit_after>
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import re
from manager import Plugin
class NginxError(Plugin):
def __init__(self, **kwargs):
self.keywords = ['nginx', 'error']
self.total_line = 0
self.level_dict = {"error": 0, "notice": 0, "info": 0}
self.client_dict = {}
def process(self, **kwargs):
"""docstring for process"""
self.total_line += 1
self.level_dict[kwargs['level']] += 1
message = kwargs['message']
m = re.match(".*client: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*", message)
if m:
if m.group(1) in self.client_dict:
self.client_dict[m.group(1)] += 1
else:
self.client_dict[m.group(1)] = 1
def report(self, **kwargs):
"""docstring for report"""
print "== Nginx Error =="
print "Nginx total error line: %d" % self.total_line
for level in self.level_dict.keys():
print "%s: %d" % (level, self.level_dict[level])
for client in self.client_dict.keys():
print "%s: %d" % (client, self.client_dict[client])
|
Add plugin for parse nginx error#!/usr/bin/env python
# -*- coding:utf-8 -*-
import re
from manager import Plugin
class NginxError(Plugin):
def __init__(self, **kwargs):
self.keywords = ['nginx', 'error']
self.total_line = 0
self.level_dict = {"error": 0, "notice": 0, "info": 0}
self.client_dict = {}
def process(self, **kwargs):
"""docstring for process"""
self.total_line += 1
self.level_dict[kwargs['level']] += 1
message = kwargs['message']
m = re.match(".*client: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*", message)
if m:
if m.group(1) in self.client_dict:
self.client_dict[m.group(1)] += 1
else:
self.client_dict[m.group(1)] = 1
def report(self, **kwargs):
"""docstring for report"""
print "== Nginx Error =="
print "Nginx total error line: %d" % self.total_line
for level in self.level_dict.keys():
print "%s: %d" % (level, self.level_dict[level])
for client in self.client_dict.keys():
print "%s: %d" % (client, self.client_dict[client])
|
<commit_before><commit_msg>Add plugin for parse nginx error<commit_after>#!/usr/bin/env python
# -*- coding:utf-8 -*-
import re
from manager import Plugin
class NginxError(Plugin):
def __init__(self, **kwargs):
self.keywords = ['nginx', 'error']
self.total_line = 0
self.level_dict = {"error": 0, "notice": 0, "info": 0}
self.client_dict = {}
def process(self, **kwargs):
"""docstring for process"""
self.total_line += 1
self.level_dict[kwargs['level']] += 1
message = kwargs['message']
m = re.match(".*client: (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*", message)
if m:
if m.group(1) in self.client_dict:
self.client_dict[m.group(1)] += 1
else:
self.client_dict[m.group(1)] = 1
def report(self, **kwargs):
"""docstring for report"""
print "== Nginx Error =="
print "Nginx total error line: %d" % self.total_line
for level in self.level_dict.keys():
print "%s: %d" % (level, self.level_dict[level])
for client in self.client_dict.keys():
print "%s: %d" % (client, self.client_dict[client])
|
|
df438a82cb78ecb0404b39182b7a4e049fbac2f9
|
pdc/apps/osbs/migrations/0002_auto_20151001_1115.py
|
pdc/apps/osbs/migrations/0002_auto_20151001_1115.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('osbs', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='osbsrecord',
name='component',
field=models.OneToOneField(related_name='osbs', to='component.ReleaseComponent'),
),
]
|
Add forgotten migration to OSBS app
|
Add forgotten migration to OSBS app
|
Python
|
mit
|
pombredanne/product-definition-center,xychu/product-definition-center,product-definition-center/product-definition-center,pombredanne/product-definition-center,product-definition-center/product-definition-center,tzhaoredhat/automation,xychu/product-definition-center,pombredanne/product-definition-center,lao605/product-definition-center,release-engineering/product-definition-center,tzhaoredhat/automation,release-engineering/product-definition-center,xychu/product-definition-center,lao605/product-definition-center,product-definition-center/product-definition-center,xychu/product-definition-center,release-engineering/product-definition-center,lao605/product-definition-center,lao605/product-definition-center,tzhaoredhat/automation,tzhaoredhat/automation,product-definition-center/product-definition-center,release-engineering/product-definition-center,pombredanne/product-definition-center
|
Add forgotten migration to OSBS app
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('osbs', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='osbsrecord',
name='component',
field=models.OneToOneField(related_name='osbs', to='component.ReleaseComponent'),
),
]
|
<commit_before><commit_msg>Add forgotten migration to OSBS app<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('osbs', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='osbsrecord',
name='component',
field=models.OneToOneField(related_name='osbs', to='component.ReleaseComponent'),
),
]
|
Add forgotten migration to OSBS app# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('osbs', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='osbsrecord',
name='component',
field=models.OneToOneField(related_name='osbs', to='component.ReleaseComponent'),
),
]
|
<commit_before><commit_msg>Add forgotten migration to OSBS app<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('osbs', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='osbsrecord',
name='component',
field=models.OneToOneField(related_name='osbs', to='component.ReleaseComponent'),
),
]
|
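Forgotten migrations like this one can be caught before review; newer Django versions ship a check that fails when model changes lack a migration (a hedged CI sketch — availability of the --check flag depends on the Django version in use):
from django.core.management import call_command
call_command('makemigrations', '--check', '--dry-run')  # exits non-zero when a migration is missing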
|
981e8bb1898ef186e646beab98a637a2bcf8c9a0
|
src/examples/gpiozero/led_chaser.py
|
src/examples/gpiozero/led_chaser.py
|
#!/usr/bin/env python3
"""Demonstrates on board LED support with correct polarity.
Implements simple LED chaser.
"""
from time import sleep
from gpiozero import LED
from aiy.pins import (PIN_A, PIN_B, PIN_C, PIN_D)
leds = (LED(PIN_A), LED(PIN_B), LED(PIN_C), LED(PIN_D))
while True:
for led in leds:
led.on()
sleep(0.5)
led.off()
|
Add simple led chaser example.
|
Add simple led chaser example.
Change-Id: I18a5dc49d115d9f222ea2445a086fc9994adce06
|
Python
|
apache-2.0
|
google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian,google/aiyprojects-raspbian
|
Add simple led chaser example.
Change-Id: I18a5dc49d115d9f222ea2445a086fc9994adce06
|
#!/usr/bin/env python3
"""Demonstrates on board LED support with correct polarity.
Implements simple LED chaser.
"""
from time import sleep
from gpiozero import LED
from aiy.pins import (PIN_A, PIN_B, PIN_C, PIN_D)
leds = (LED(PIN_A), LED(PIN_B), LED(PIN_C), LED(PIN_D))
while True:
for led in leds:
led.on()
sleep(0.5)
led.off()
|
<commit_before><commit_msg>Add simple led chaser example.
Change-Id: I18a5dc49d115d9f222ea2445a086fc9994adce06<commit_after>
|
#!/usr/bin/env python3
"""Demonstrates on board LED support with correct polarity.
Implements simple LED chaser.
"""
from time import sleep
from gpiozero import LED
from aiy.pins import (PIN_A, PIN_B, PIN_C, PIN_D)
leds = (LED(PIN_A), LED(PIN_B), LED(PIN_C), LED(PIN_D))
while True:
for led in leds:
led.on()
sleep(0.5)
led.off()
|
Add simple led chaser example.
Change-Id: I18a5dc49d115d9f222ea2445a086fc9994adce06#!/usr/bin/env python3
"""Demonstrates on board LED support with correct polarity.
Implements simple LED chaser.
"""
from time import sleep
from gpiozero import LED
from aiy.pins import (PIN_A, PIN_B, PIN_C, PIN_D)
leds = (LED(PIN_A), LED(PIN_B), LED(PIN_C), LED(PIN_D))
while True:
for led in leds:
led.on()
sleep(0.5)
led.off()
|
<commit_before><commit_msg>Add simple led chaser example.
Change-Id: I18a5dc49d115d9f222ea2445a086fc9994adce06<commit_after>#!/usr/bin/env python3
"""Demonstrates on board LED support with correct polarity.
Implements simple LED chaser.
"""
from time import sleep
from gpiozero import LED
from aiy.pins import (PIN_A, PIN_B, PIN_C, PIN_D)
leds = (LED(PIN_A), LED(PIN_B), LED(PIN_C), LED(PIN_D))
while True:
for led in leds:
led.on()
sleep(0.5)
led.off()
|
|
6d1439e39a2970eaa7c98cb0ee69b9b954fd1c16
|
migrations/versions/0168_hidden_templates.py
|
migrations/versions/0168_hidden_templates.py
|
"""
Revision ID: 0168_hidden_templates
Revises: 0167_add_precomp_letter_svc_perm
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
import sqlalchemy as sa
revision = '0168_hidden_templates'
down_revision = '0167_add_precomp_letter_svc_perm'
def upgrade():
op.add_column('templates', sa.Column('hidden', sa.Boolean(), nullable=True))
op.add_column('templates_history', sa.Column('hidden', sa.Boolean(), nullable=True))
op.execute('UPDATE templates SET hidden=false')
op.execute('UPDATE templates_history SET hidden=false')
op.alter_column('templates', 'hidden', nullable=False)
op.alter_column('templates_history', 'hidden', nullable=False)
def downgrade():
op.drop_column('templates_history', 'hidden')
op.drop_column('templates', 'hidden')
|
Add a DB migration to create Templates.hidden column
|
Add a DB migration to create Templates.hidden column
Creates the column as nullable, sets the value to false for all
existing templates and template versions and then applies a
not-nullable constraint.
All future Templates are created with `False` as the default set
in SQLAlchemy.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add a DB migration to create Templates.hidden column
Creates the column as nullable, sets the value to false for all
existing templates and template versions and then applies a
not-nullable constraint.
All future Templates are created with `False` as the default set
in SQLAlchemy.
|
"""
Revision ID: 0168_hidden_templates
Revises: 0167_add_precomp_letter_svc_perm
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
import sqlalchemy as sa
revision = '0168_hidden_templates'
down_revision = '0167_add_precomp_letter_svc_perm'
def upgrade():
op.add_column('templates', sa.Column('hidden', sa.Boolean(), nullable=True))
op.add_column('templates_history', sa.Column('hidden', sa.Boolean(), nullable=True))
op.execute('UPDATE templates SET hidden=false')
op.execute('UPDATE templates_history SET hidden=false')
op.alter_column('templates', 'hidden', nullable=False)
op.alter_column('templates_history', 'hidden', nullable=False)
def downgrade():
op.drop_column('templates_history', 'hidden')
op.drop_column('templates', 'hidden')
|
<commit_before><commit_msg>Add a DB migration to create Templates.hidden column
Creates the column as nullable, sets the value to false for all
existing templates and template versions and then applies a
not-nullable constraint.
All future Templates are created with `False` as the default set
in SQLAlchemy.<commit_after>
|
"""
Revision ID: 0168_hidden_templates
Revises: 0167_add_precomp_letter_svc_perm
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
import sqlalchemy as sa
revision = '0168_hidden_templates'
down_revision = '0167_add_precomp_letter_svc_perm'
def upgrade():
op.add_column('templates', sa.Column('hidden', sa.Boolean(), nullable=True))
op.add_column('templates_history', sa.Column('hidden', sa.Boolean(), nullable=True))
op.execute('UPDATE templates SET hidden=false')
op.execute('UPDATE templates_history SET hidden=false')
op.alter_column('templates', 'hidden', nullable=False)
op.alter_column('templates_history', 'hidden', nullable=False)
def downgrade():
op.drop_column('templates_history', 'hidden')
op.drop_column('templates', 'hidden')
|
Add a DB migration to create Templates.hidden column
Creates the column as nullable, sets the value to false for all
existing templates and template versions and then applies a
not-nullable constraint.
All future Templates are created with `False` as the default set
in SQLAlchemy."""
Revision ID: 0168_hidden_templates
Revises: 0167_add_precomp_letter_svc_perm
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
import sqlalchemy as sa
revision = '0168_hidden_templates'
down_revision = '0167_add_precomp_letter_svc_perm'
def upgrade():
op.add_column('templates', sa.Column('hidden', sa.Boolean(), nullable=True))
op.add_column('templates_history', sa.Column('hidden', sa.Boolean(), nullable=True))
op.execute('UPDATE templates SET hidden=false')
op.execute('UPDATE templates_history SET hidden=false')
op.alter_column('templates', 'hidden', nullable=False)
op.alter_column('templates_history', 'hidden', nullable=False)
def downgrade():
op.drop_column('templates_history', 'hidden')
op.drop_column('templates', 'hidden')
|
<commit_before><commit_msg>Add a DB migration to create Templates.hidden column
Creates the column as nullable, sets the value to false for all
existing templates and template versions and then applies a
not-nullable constraint.
All future Templates are created with `False` as the default set
in SQLAlchemy.<commit_after>"""
Revision ID: 0168_hidden_templates
Revises: 0167_add_precomp_letter_svc_perm
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
import sqlalchemy as sa
revision = '0168_hidden_templates'
down_revision = '0167_add_precomp_letter_svc_perm'
def upgrade():
op.add_column('templates', sa.Column('hidden', sa.Boolean(), nullable=True))
op.add_column('templates_history', sa.Column('hidden', sa.Boolean(), nullable=True))
op.execute('UPDATE templates SET hidden=false')
op.execute('UPDATE templates_history SET hidden=false')
op.alter_column('templates', 'hidden', nullable=False)
op.alter_column('templates_history', 'hidden', nullable=False)
def downgrade():
op.drop_column('templates_history', 'hidden')
op.drop_column('templates', 'hidden')
|
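A hedged alternative sketch for the same add-backfill-constrain sequence: a server default lets the database backfill in one step, avoiding the window where rows hold NULL (assumes the same Alembic/SQLAlchemy setup; this is not how the repo actually did it):
from alembic import op
import sqlalchemy as sa
def upgrade():
    for table in ('templates', 'templates_history'):
        op.add_column(table, sa.Column('hidden', sa.Boolean(), nullable=False,
                                       server_default=sa.false()))
        op.alter_column(table, 'hidden', server_default=None)  # app layer owns the default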
|
8b410e8876835133b7f18e42754da7c793ce950f
|
migrations/versions/0226_new_letter_rates.py
|
migrations/versions/0226_new_letter_rates.py
|
"""empty message
Revision ID: 0226_new_letter_rates
Revises: 0225_another_letter_org
"""
revision = '0226_new_letter_rates'
down_revision = '0225_another_letter_org'
import uuid
from datetime import datetime, timezone
from alembic import op
start = datetime(2018, 10, 1, 0, 0, tzinfo=timezone.utc)
NEW_RATES = [
(uuid.uuid4(), start, 1, 0.30, True, 'second'),
(uuid.uuid4(), start, 1, 0.30, False, 'second'),
(uuid.uuid4(), start, 2, 0.35, True, 'second'),
(uuid.uuid4(), start, 2, 0.35, False, 'second'),
(uuid.uuid4(), start, 3, 0.40, True, 'second'),
(uuid.uuid4(), start, 3, 0.40, False, 'second'),
(uuid.uuid4(), start, 4, 0.45, True, 'second'),
(uuid.uuid4(), start, 4, 0.45, False, 'second'),
(uuid.uuid4(), start, 5, 0.50, True, 'second'),
(uuid.uuid4(), start, 5, 0.50, False, 'second'),
(uuid.uuid4(), start, 1, 0.56, True, 'first'),
(uuid.uuid4(), start, 1, 0.56, False, 'first'),
(uuid.uuid4(), start, 2, 0.61, True, 'first'),
(uuid.uuid4(), start, 2, 0.61, False, 'first'),
(uuid.uuid4(), start, 3, 0.66, True, 'first'),
(uuid.uuid4(), start, 3, 0.66, False, 'first'),
(uuid.uuid4(), start, 4, 0.71, True, 'first'),
(uuid.uuid4(), start, 4, 0.71, False, 'first'),
(uuid.uuid4(), start, 5, 0.76, True, 'first'),
(uuid.uuid4(), start, 5, 0.76, False, 'first'),
]
def upgrade():
conn = op.get_bind()
for id, start_date, sheet_count, rate, crown, post_class in NEW_RATES:
conn.execute("""
INSERT INTO letter_rates (id, start_date, sheet_count, rate, crown, post_class)
VALUES ('{}', '{}', '{}', '{}', '{}', '{}')
""".format(id, start_date, sheet_count, rate, crown, post_class))
def downgrade():
pass
|
Add new letter rates from 1st of October 2018
|
Add new letter rates from 1st of October 2018
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Add new letter rates from 1st of October 2018
|
"""empty message
Revision ID: 0226_new_letter_rates
Revises: 0225_another_letter_org
"""
revision = '0226_new_letter_rates'
down_revision = '0225_another_letter_org'
import uuid
from datetime import datetime, timezone
from alembic import op
start = datetime(2018, 10, 1, 0, 0, tzinfo=timezone.utc)
NEW_RATES = [
(uuid.uuid4(), start, 1, 0.30, True, 'second'),
(uuid.uuid4(), start, 1, 0.30, False, 'second'),
(uuid.uuid4(), start, 2, 0.35, True, 'second'),
(uuid.uuid4(), start, 2, 0.35, False, 'second'),
(uuid.uuid4(), start, 3, 0.40, True, 'second'),
(uuid.uuid4(), start, 3, 0.40, False, 'second'),
(uuid.uuid4(), start, 4, 0.45, True, 'second'),
(uuid.uuid4(), start, 4, 0.45, False, 'second'),
(uuid.uuid4(), start, 5, 0.50, True, 'second'),
(uuid.uuid4(), start, 5, 0.50, False, 'second'),
(uuid.uuid4(), start, 1, 0.56, True, 'first'),
(uuid.uuid4(), start, 1, 0.56, False, 'first'),
(uuid.uuid4(), start, 2, 0.61, True, 'first'),
(uuid.uuid4(), start, 2, 0.61, False, 'first'),
(uuid.uuid4(), start, 3, 0.66, True, 'first'),
(uuid.uuid4(), start, 3, 0.66, False, 'first'),
(uuid.uuid4(), start, 4, 0.71, True, 'first'),
(uuid.uuid4(), start, 4, 0.71, False, 'first'),
(uuid.uuid4(), start, 5, 0.76, True, 'first'),
(uuid.uuid4(), start, 5, 0.76, False, 'first'),
]
def upgrade():
conn = op.get_bind()
for id, start_date, sheet_count, rate, crown, post_class in NEW_RATES:
conn.execute("""
INSERT INTO letter_rates (id, start_date, sheet_count, rate, crown, post_class)
VALUES ('{}', '{}', '{}', '{}', '{}', '{}')
""".format(id, start_date, sheet_count, rate, crown, post_class))
def downgrade():
pass
|
<commit_before><commit_msg>Add new letter rates from 1st of October 2018<commit_after>
|
"""empty message
Revision ID: 0226_new_letter_rates
Revises: 0225_another_letter_org
"""
revision = '0226_new_letter_rates'
down_revision = '0225_another_letter_org'
import uuid
from datetime import datetime, timezone
from alembic import op
start = datetime(2018, 10, 1, 0, 0, tzinfo=timezone.utc)
NEW_RATES = [
(uuid.uuid4(), start, 1, 0.30, True, 'second'),
(uuid.uuid4(), start, 1, 0.30, False, 'second'),
(uuid.uuid4(), start, 2, 0.35, True, 'second'),
(uuid.uuid4(), start, 2, 0.35, False, 'second'),
(uuid.uuid4(), start, 3, 0.40, True, 'second'),
(uuid.uuid4(), start, 3, 0.40, False, 'second'),
(uuid.uuid4(), start, 4, 0.45, True, 'second'),
(uuid.uuid4(), start, 4, 0.45, False, 'second'),
(uuid.uuid4(), start, 5, 0.50, True, 'second'),
(uuid.uuid4(), start, 5, 0.50, False, 'second'),
(uuid.uuid4(), start, 1, 0.56, True, 'first'),
(uuid.uuid4(), start, 1, 0.56, False, 'first'),
(uuid.uuid4(), start, 2, 0.61, True, 'first'),
(uuid.uuid4(), start, 2, 0.61, False, 'first'),
(uuid.uuid4(), start, 3, 0.66, True, 'first'),
(uuid.uuid4(), start, 3, 0.66, False, 'first'),
(uuid.uuid4(), start, 4, 0.71, True, 'first'),
(uuid.uuid4(), start, 4, 0.71, False, 'first'),
(uuid.uuid4(), start, 5, 0.76, True, 'first'),
(uuid.uuid4(), start, 5, 0.76, False, 'first'),
]
def upgrade():
conn = op.get_bind()
for id, start_date, sheet_count, rate, crown, post_class in NEW_RATES:
conn.execute("""
INSERT INTO letter_rates (id, start_date, sheet_count, rate, crown, post_class)
VALUES ('{}', '{}', '{}', '{}', '{}', '{}')
""".format(id, start_date, sheet_count, rate, crown, post_class))
def downgrade():
pass
|
Add new letter rates from 1st of October 2018"""New letter rates effective 1 October 2018.
Revision ID: 0226_new_letter_rates
Revises: 0225_another_letter_org
"""
revision = '0226_new_letter_rates'
down_revision = '0225_another_letter_org'
import uuid
from datetime import datetime, timezone
from alembic import op
start = datetime(2018, 10, 1, 0, 0, tzinfo=timezone.utc)
NEW_RATES = [
(uuid.uuid4(), start, 1, 0.30, True, 'second'),
(uuid.uuid4(), start, 1, 0.30, False, 'second'),
(uuid.uuid4(), start, 2, 0.35, True, 'second'),
(uuid.uuid4(), start, 2, 0.35, False, 'second'),
(uuid.uuid4(), start, 3, 0.40, True, 'second'),
(uuid.uuid4(), start, 3, 0.40, False, 'second'),
(uuid.uuid4(), start, 4, 0.45, True, 'second'),
(uuid.uuid4(), start, 4, 0.45, False, 'second'),
(uuid.uuid4(), start, 5, 0.50, True, 'second'),
(uuid.uuid4(), start, 5, 0.50, False, 'second'),
(uuid.uuid4(), start, 1, 0.56, True, 'first'),
(uuid.uuid4(), start, 1, 0.56, False, 'first'),
(uuid.uuid4(), start, 2, 0.61, True, 'first'),
(uuid.uuid4(), start, 2, 0.61, False, 'first'),
(uuid.uuid4(), start, 3, 0.66, True, 'first'),
(uuid.uuid4(), start, 3, 0.66, False, 'first'),
(uuid.uuid4(), start, 4, 0.71, True, 'first'),
(uuid.uuid4(), start, 4, 0.71, False, 'first'),
(uuid.uuid4(), start, 5, 0.76, True, 'first'),
(uuid.uuid4(), start, 5, 0.76, False, 'first'),
]
def upgrade():
conn = op.get_bind()
for id, start_date, sheet_count, rate, crown, post_class in NEW_RATES:
conn.execute("""
INSERT INTO letter_rates (id, start_date, sheet_count, rate, crown, post_class)
VALUES ('{}', '{}', '{}', '{}', '{}', '{}')
""".format(id, start_date, sheet_count, rate, crown, post_class))
def downgrade():
pass
|
<commit_before><commit_msg>Add new letter rates from 1st of October 2018<commit_after>"""empty message
Revision ID: 0226_new_letter_rates
Revises: 0225_another_letter_org
"""
revision = '0226_new_letter_rates'
down_revision = '0225_another_letter_org'
import uuid
from datetime import datetime, timezone
from alembic import op
start = datetime(2018, 10, 1, 0, 0, tzinfo=timezone.utc)
NEW_RATES = [
(uuid.uuid4(), start, 1, 0.30, True, 'second'),
(uuid.uuid4(), start, 1, 0.30, False, 'second'),
(uuid.uuid4(), start, 2, 0.35, True, 'second'),
(uuid.uuid4(), start, 2, 0.35, False, 'second'),
(uuid.uuid4(), start, 3, 0.40, True, 'second'),
(uuid.uuid4(), start, 3, 0.40, False, 'second'),
(uuid.uuid4(), start, 4, 0.45, True, 'second'),
(uuid.uuid4(), start, 4, 0.45, False, 'second'),
(uuid.uuid4(), start, 5, 0.50, True, 'second'),
(uuid.uuid4(), start, 5, 0.50, False, 'second'),
(uuid.uuid4(), start, 1, 0.56, True, 'first'),
(uuid.uuid4(), start, 1, 0.56, False, 'first'),
(uuid.uuid4(), start, 2, 0.61, True, 'first'),
(uuid.uuid4(), start, 2, 0.61, False, 'first'),
(uuid.uuid4(), start, 3, 0.66, True, 'first'),
(uuid.uuid4(), start, 3, 0.66, False, 'first'),
(uuid.uuid4(), start, 4, 0.71, True, 'first'),
(uuid.uuid4(), start, 4, 0.71, False, 'first'),
(uuid.uuid4(), start, 5, 0.76, True, 'first'),
(uuid.uuid4(), start, 5, 0.76, False, 'first'),
]
def upgrade():
conn = op.get_bind()
for id, start_date, sheet_count, rate, crown, post_class in NEW_RATES:
conn.execute("""
INSERT INTO letter_rates (id, start_date, sheet_count, rate, crown, post_class)
VALUES ('{}', '{}', '{}', '{}', '{}', '{}')
""".format(id, start_date, sheet_count, rate, crown, post_class))
def downgrade():
pass
|
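A hedged sketch of the same insert loop with bound parameters instead of str.format, sidestepping quoting and injection concerns (assumes SQLAlchemy's text() is importable, as it is wherever Alembic runs):
from sqlalchemy import text
def upgrade():
    conn = op.get_bind()
    stmt = text(
        "INSERT INTO letter_rates (id, start_date, sheet_count, rate, crown, post_class) "
        "VALUES (:id, :start_date, :sheet_count, :rate, :crown, :post_class)")
    for id_, start_date, sheet_count, rate, crown, post_class in NEW_RATES:
        conn.execute(stmt, {'id': str(id_), 'start_date': start_date,
                            'sheet_count': sheet_count, 'rate': rate,
                            'crown': crown, 'post_class': post_class})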
|
d7c2fdca488761aecc2ba1cfd80e44af3de2ea2b
|
go/vumitools/opt_out/utils.py
|
go/vumitools/opt_out/utils.py
|
from vumi.config import Config, ConfigBool, ConfigList
class OptOutHelperConfig(Config):
case_sensitive = ConfigBool(
"Whether case sensitivity should be enforced when checking message "
"content for opt outs",
default=False, static=True)
optout_keywords = ConfigList(
"List of the keywords which count as opt outs",
default=(), static=True)
|
Add a config class for an opt out helper
|
Add a config class for an opt out helper
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
Add a config class for an opt out helper
|
from vumi.config import Config, ConfigBool, ConfigList
class OptOutHelperConfig(Config):
case_sensitive = ConfigBool(
"Whether case sensitivity should be enforced when checking message "
"content for opt outs",
default=False, static=True)
optout_keywords = ConfigList(
"List of the keywords which count as opt outs",
default=(), static=True)
|
<commit_before><commit_msg>Add a config class for an opt out helper<commit_after>
|
from vumi.config import Config, ConfigBool, ConfigList
class OptOutHelperConfig(Config):
case_sensitive = ConfigBool(
"Whether case sensitivity should be enforced when checking message "
"content for opt outs",
default=False, static=True)
optout_keywords = ConfigList(
"List of the keywords which count as opt outs",
default=(), static=True)
|
Add a config class for an opt out helperfrom vumi.config import Config, ConfigBool, ConfigList
class OptOutHelperConfig(Config):
case_sensitive = ConfigBool(
"Whether case sensitivity should be enforced when checking message "
"content for opt outs",
default=False, static=True)
optout_keywords = ConfigList(
"List of the keywords which count as opt outs",
default=(), static=True)
|
<commit_before><commit_msg>Add a config class for an opt out helper<commit_after>from vumi.config import Config, ConfigBool, ConfigList
class OptOutHelperConfig(Config):
case_sensitive = ConfigBool(
"Whether case sensitivity should be enforced when checking message "
"content for opt outs",
default=False, static=True)
optout_keywords = ConfigList(
"List of the keywords which count as opt outs",
default=(), static=True)
|
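A hedged usage sketch showing how a helper might consume this config; the constructor-from-dict and the matching function are illustrative assumptions, not vumi-go API:
config = OptOutHelperConfig({'optout_keywords': ['STOP', 'END']})
def is_opt_out(content, config):
    """Return True if the message content matches a configured opt-out keyword."""
    text = content.strip() if config.case_sensitive else content.strip().lower()
    keywords = (config.optout_keywords if config.case_sensitive
                else [k.lower() for k in config.optout_keywords])
    return text in keywords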
|
7d55f2a8fc5db6092abe1965da6c5ad0046a8d1e
|
populate_rango.py
|
populate_rango.py
|
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
import django
django.setup()
from rango.models import Category, Page
def populate():
suits_cat = add_cat('Suits')
add_page(cat=suits_cat,
title = "Blue suit")
add_page(cat=suits_cat,
title = "Georgio Armani")
add_page(cat=suits_cat,
title = "Zoot suit")
pants_cat = add_cat('Pants')
add_page(cat=pants_cat, title= "Pinstripe")
add_page(cat=pants_cat, title="Jeans")
def add_page(cat, title, views=0):
page = Page.objects.get_or_create(category=cat, title=title)[0]
page.url = ''
    page.views = views
page.save()
return page
def add_cat(name):
cat = Category.objects.get_or_create(name=name)[0]
return cat
if __name__ == '__main__':
print("Starting Rango population script...")
populate()
|
Complete population script and seeding
|
Complete population script and seeding
|
Python
|
mit
|
dnestoff/Tango-With-Django,dnestoff/Tango-With-Django
|
Complete population script and seeding
|
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
import django
django.setup()
from rango.models import Category, Page
def populate():
suits_cat = add_cat('Suits')
add_page(cat=suits_cat,
title = "Blue suit")
add_page(cat=suits_cat,
title = "Georgio Armani")
add_page(cat=suits_cat,
title = "Zoot suit")
pants_cat = add_cat('Pants')
add_page(cat=pants_cat, title= "Pinstripe")
add_page(cat=pants_cat, title="Jeans")
def add_page(cat, title, views=0):
page = Page.objects.get_or_create(category=cat, title=title)[0]
page.url = ''
    page.views = views
page.save()
return page
def add_cat(name):
cat = Category.objects.get_or_create(name=name)[0]
return cat
if __name__ == '__main__':
print("Starting Rango population script...")
populate()
|
<commit_before><commit_msg>Complete population script and seeding<commit_after>
|
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
import django
django.setup()
from rango.models import Category, Page
def populate():
suits_cat = add_cat('Suits')
add_page(cat=suits_cat,
title = "Blue suit")
add_page(cat=suits_cat,
title = "Georgio Armani")
add_page(cat=suits_cat,
title = "Zoot suit")
pants_cat = add_cat('Pants')
add_page(cat=pants_cat, title= "Pinstripe")
add_page(cat=pants_cat, title="Jeans")
def add_page(cat, title, views=0):
page = Page.objects.get_or_create(category=cat, title=title)[0]
page.url = ''
    page.views = views
page.save()
return page
def add_cat(name):
cat = Category.objects.get_or_create(name=name)[0]
return cat
if __name__ == '__main__':
print("Starting Rango population script...")
populate()
|
Complete population script and seedingimport os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
import django
django.setup()
from rango.models import Category, Page
def populate():
suits_cat = add_cat('Suits')
add_page(cat=suits_cat,
title = "Blue suit")
add_page(cat=suits_cat,
title = "Georgio Armani")
add_page(cat=suits_cat,
title = "Zoot suit")
pants_cat = add_cat('Pants')
add_page(cat=pants_cat, title= "Pinstripe")
add_page(cat=pants_cat, title="Jeans")
def add_page(cat, title, views=0):
page = Page.objects.get_or_create(category=cat, title=title)[0]
page.url = ''
    page.views = views
page.save()
return page
def add_cat(name):
cat = Category.objects.get_or_create(name=name)[0]
return cat
if __name__ == '__main__':
print("Starting Rango population script...")
populate()
|
<commit_before><commit_msg>Complete population script and seeding<commit_after>import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
import django
django.setup()
from rango.models import Category, Page
def populate():
suits_cat = add_cat('Suits')
add_page(cat=suits_cat,
title = "Blue suit")
add_page(cat=suits_cat,
title = "Georgio Armani")
add_page(cat=suits_cat,
title = "Zoot suit")
pants_cat = add_cat('Pants')
add_page(cat=pants_cat, title= "Pinstripe")
add_page(cat=pants_cat, title="Jeans")
def add_page(cat, title, views=0):
page = Page.objects.get_or_create(category=cat, title=title)[0]
page.url = ''
    page.views = views
page.save()
return page
def add_cat(name):
cat = Category.objects.get_or_create(name=name)[0]
return cat
if __name__ == '__main__':
print("Starting Rango population script...")
populate()
|
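Because both helpers go through get_or_create, re-running the script is idempotent; a quick hypothetical check in a Django shell after two runs (not part of the repo):
from rango.models import Category, Page
assert Category.objects.filter(name='Suits').count() == 1  # still one after a re-run
assert Page.objects.filter(title='Jeans').count() == 1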
|
00a52ec4453ee0bcd498c439e411bfcaad41a4fc
|
scripts/update-alfred.py
|
scripts/update-alfred.py
|
from __future__ import unicode_literals
import binascii
import json
import os
import plistlib
import sys
import unicodedata
import zipfile
def make_random_uid():
return binascii.b2a_hex(os.urandom(15))
def titlecase_phrase(phrase):
return ' '.join([word.title() for word in phrase])
def make_emoji_name(emoji, shortcut):
try:
return titlecase_phrase(unicodedata.name(emoji).split())
except (ValueError, TypeError):
return titlecase_phrase(shortcut.strip(':').split('_'))
def main():
scripts_directory = os.path.dirname(os.path.realpath(__file__))
emoji_substitutions_file = os.path.join(scripts_directory,
'../emoji substitutions.plist')
emoji_substitutions = plistlib.readPlist(emoji_substitutions_file)
outfile = '../Emoji.alfredsnippets'
with zipfile.ZipFile(outfile, 'w') as snippets:
for item in emoji_substitutions:
phrase, shortcut = item['phrase'], item['shortcut']
uid = make_random_uid()
name = make_emoji_name(phrase, shortcut)
snippets.writestr(
'{} [{}].json'.format(name, uid),
json.dumps({
"alfredsnippet" : {
"snippet" : phrase,
"uid" : uid,
"name" : name,
"keyword" : shortcut
}
})
)
if __name__ == "__main__":
sys.exit(main())
|
Add Python script to create .alfredsnippets file from plist.
|
Add Python script to create .alfredsnippets file from plist.
|
Python
|
mit
|
warpling/Macmoji,warpling/Macmoji
|
Add Python script to create .alfredsnippets file from plist.
|
from __future__ import unicode_literals
import binascii
import json
import os
import plistlib
import sys
import unicodedata
import zipfile
def make_random_uid():
return binascii.b2a_hex(os.urandom(15))
def titlecase_phrase(phrase):
return ' '.join([word.title() for word in phrase])
def make_emoji_name(emoji, shortcut):
try:
return titlecase_phrase(unicodedata.name(emoji).split())
except (ValueError, TypeError):
return titlecase_phrase(shortcut.strip(':').split('_'))
def main():
scripts_directory = os.path.dirname(os.path.realpath(__file__))
emoji_substitutions_file = os.path.join(scripts_directory,
'../emoji substitutions.plist')
emoji_substitutions = plistlib.readPlist(emoji_substitutions_file)
outfile = '../Emoji.alfredsnippets'
with zipfile.ZipFile(outfile, 'w') as snippets:
for item in emoji_substitutions:
phrase, shortcut = item['phrase'], item['shortcut']
uid = make_random_uid()
name = make_emoji_name(phrase, shortcut)
snippets.writestr(
'{} [{}].json'.format(name, uid),
json.dumps({
"alfredsnippet" : {
"snippet" : phrase,
"uid" : uid,
"name" : name,
"keyword" : shortcut
}
})
)
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add Python script to create .alfredsnippets file from plist.<commit_after>
|
from __future__ import unicode_literals
import binascii
import json
import os
import plistlib
import sys
import unicodedata
import zipfile
def make_random_uid():
return binascii.b2a_hex(os.urandom(15))
def titlecase_phrase(phrase):
return ' '.join([word.title() for word in phrase])
def make_emoji_name(emoji, shortcut):
try:
return titlecase_phrase(unicodedata.name(emoji).split())
except (ValueError, TypeError):
return titlecase_phrase(shortcut.strip(':').split('_'))
def main():
scripts_directory = os.path.dirname(os.path.realpath(__file__))
emoji_substitutions_file = os.path.join(scripts_directory,
'../emoji substitutions.plist')
emoji_substitutions = plistlib.readPlist(emoji_substitutions_file)
outfile = '../Emoji.alfredsnippets'
with zipfile.ZipFile(outfile, 'w') as snippets:
for item in emoji_substitutions:
phrase, shortcut = item['phrase'], item['shortcut']
uid = make_random_uid()
name = make_emoji_name(phrase, shortcut)
snippets.writestr(
'{} [{}].json'.format(name, uid),
json.dumps({
"alfredsnippet" : {
"snippet" : phrase,
"uid" : uid,
"name" : name,
"keyword" : shortcut
}
})
)
if __name__ == "__main__":
sys.exit(main())
|
Add Python script to create .alfredsnippets file from plist.from __future__ import unicode_literals
import binascii
import json
import os
import plistlib
import sys
import unicodedata
import zipfile
def make_random_uid():
return binascii.b2a_hex(os.urandom(15))
def titlecase_phrase(phrase):
return ' '.join([word.title() for word in phrase])
def make_emoji_name(emoji, shortcut):
try:
return titlecase_phrase(unicodedata.name(emoji).split())
except (ValueError, TypeError):
return titlecase_phrase(shortcut.strip(':').split('_'))
def main():
scripts_directory = os.path.dirname(os.path.realpath(__file__))
emoji_substitutions_file = os.path.join(scripts_directory,
'../emoji substitutions.plist')
emoji_substitutions = plistlib.readPlist(emoji_substitutions_file)
outfile = '../Emoji.alfredsnippets'
with zipfile.ZipFile(outfile, 'w') as snippets:
for item in emoji_substitutions:
phrase, shortcut = item['phrase'], item['shortcut']
uid = make_random_uid()
name = make_emoji_name(phrase, shortcut)
snippets.writestr(
'{} [{}].json'.format(name, uid),
json.dumps({
"alfredsnippet" : {
"snippet" : phrase,
"uid" : uid,
"name" : name,
"keyword" : shortcut
}
})
)
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add Python script to create .alfredsnippets file from plist.<commit_after>from __future__ import unicode_literals
import binascii
import json
import os
import plistlib
import sys
import unicodedata
import zipfile
def make_random_uid():
return binascii.b2a_hex(os.urandom(15))
def titlecase_phrase(phrase):
return ' '.join([word.title() for word in phrase])
def make_emoji_name(emoji, shortcut):
try:
return titlecase_phrase(unicodedata.name(emoji).split())
except (ValueError, TypeError):
return titlecase_phrase(shortcut.strip(':').split('_'))
def main():
scripts_directory = os.path.dirname(os.path.realpath(__file__))
emoji_substitutions_file = os.path.join(scripts_directory,
'../emoji substitutions.plist')
emoji_substitutions = plistlib.readPlist(emoji_substitutions_file)
outfile = '../Emoji.alfredsnippets'
with zipfile.ZipFile(outfile, 'w') as snippets:
for item in emoji_substitutions:
phrase, shortcut = item['phrase'], item['shortcut']
uid = make_random_uid()
name = make_emoji_name(phrase, shortcut)
snippets.writestr(
'{} [{}].json'.format(name, uid),
json.dumps({
"alfredsnippet" : {
"snippet" : phrase,
"uid" : uid,
"name" : name,
"keyword" : shortcut
}
})
)
if __name__ == "__main__":
sys.exit(main())
|
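A hedged verification sketch: the generated bundle is just a zip of one-snippet JSON files, so it can be spot-checked from Python (the bundle file name is taken from the script above):
import json
import zipfile
with zipfile.ZipFile('Emoji.alfredsnippets') as bundle:
    for entry in bundle.namelist()[:3]:
        snippet = json.loads(bundle.read(entry))['alfredsnippet']
        print(entry, snippet['keyword'], snippet['snippet'])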
|
b1eba723dbdc068558ab34cc226c32bcb8bfa2ef
|
intake_bluesky/tests/test_core.py
|
intake_bluesky/tests/test_core.py
|
import event_model
from intake_bluesky.core import documents_to_xarray
def test_no_descriptors():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
def test_no_events():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
desc_bundle = run_bundle.compose_descriptor(
data_keys={'x': {'source': '...', 'shape': [], 'dtype': 'number'}},
name='primary')
descriptor_doc = desc_bundle.descriptor_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[descriptor_doc],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
|
Test runs with no descriptors or no events.
|
TST: Test runs with no descriptors or no events.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
TST: Test runs with no descriptors or no events.
|
import event_model
from intake_bluesky.core import documents_to_xarray
def test_no_descriptors():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
def test_no_events():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
desc_bundle = run_bundle.compose_descriptor(
data_keys={'x': {'source': '...', 'shape': [], 'dtype': 'number'}},
name='primary')
descriptor_doc = desc_bundle.descriptor_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[descriptor_doc],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
|
<commit_before><commit_msg>TST: Test runs with no descriptors or no events.<commit_after>
|
import event_model
from intake_bluesky.core import documents_to_xarray
def test_no_descriptors():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
def test_no_events():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
desc_bundle = run_bundle.compose_descriptor(
data_keys={'x': {'source': '...', 'shape': [], 'dtype': 'number'}},
name='primary')
descriptor_doc = desc_bundle.descriptor_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[descriptor_doc],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
|
TST: Test runs with no descriptors or no events.import event_model
from intake_bluesky.core import documents_to_xarray
def test_no_descriptors():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
def test_no_events():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
desc_bundle = run_bundle.compose_descriptor(
data_keys={'x': {'source': '...', 'shape': [], 'dtype': 'number'}},
name='primary')
descriptor_doc = desc_bundle.descriptor_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[descriptor_doc],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
|
<commit_before><commit_msg>TST: Test runs with no descriptors or no events.<commit_after>import event_model
from intake_bluesky.core import documents_to_xarray
def test_no_descriptors():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
def test_no_events():
run_bundle = event_model.compose_run()
start_doc = run_bundle.start_doc
desc_bundle = run_bundle.compose_descriptor(
data_keys={'x': {'source': '...', 'shape': [], 'dtype': 'number'}},
name='primary')
descriptor_doc = desc_bundle.descriptor_doc
stop_doc = run_bundle.compose_stop()
documents_to_xarray(start_doc=start_doc, stop_doc=stop_doc,
descriptor_docs=[descriptor_doc],
event_docs=[],
filler=event_model.Filler({}),
get_resource=None,
get_datum=None,
get_datum_cursor=None)
|
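The two tests differ only in whether a descriptor is composed, so they could be folded together; a hedged pytest sketch (the parametrization is my addition, not the suite's existing style):
import event_model
import pytest
from intake_bluesky.core import documents_to_xarray
@pytest.mark.parametrize('with_descriptor', [False, True])
def test_empty_run(with_descriptor):
    run_bundle = event_model.compose_run()
    descriptors = []
    if with_descriptor:
        desc_bundle = run_bundle.compose_descriptor(
            data_keys={'x': {'source': '...', 'shape': [], 'dtype': 'number'}},
            name='primary')
        descriptors.append(desc_bundle.descriptor_doc)
    documents_to_xarray(start_doc=run_bundle.start_doc,
                        stop_doc=run_bundle.compose_stop(),
                        descriptor_docs=descriptors, event_docs=[],
                        filler=event_model.Filler({}),
                        get_resource=None, get_datum=None,
                        get_datum_cursor=None)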
|
16956e0d482e871ba2ff6800a107bd57ea0fdd9b
|
estimators/admin.py
|
estimators/admin.py
|
from django.contrib import admin
from estimators.models import DataSet, Estimator, EvaluationResult
admin.site.register(Estimator)
admin.site.register(DataSet)
admin.site.register(EvaluationResult)
|
Add Basic Admin Functionality for Estimators, EvaluationResult and DataSet
|
Add Basic Admin Functionality for Estimators, EvaluationResult and DataSet
|
Python
|
mit
|
fridiculous/django-estimators
|
Add Basic Admin Functionality for Estimators, EvaluationResult and DataSet
|
from django.contrib import admin
from estimators.models import DataSet, Estimator, EvaluationResult
admin.site.register(Estimator)
admin.site.register(DataSet)
admin.site.register(EvaluationResult)
|
<commit_before><commit_msg>Add Basic Admin Functionality for Estimators, EvaluationResult and DataSet<commit_after>
|
from django.contrib import admin
from estimators.models import DataSet, Estimator, EvaluationResult
admin.site.register(Estimator)
admin.site.register(DataSet)
admin.site.register(EvaluationResult)
|
Add Basic Admin Functionality for Estimators, EvaluationResult and DataSetfrom django.contrib import admin
from estimators.models import DataSet, Estimator, EvaluationResult
admin.site.register(Estimator)
admin.site.register(DataSet)
admin.site.register(EvaluationResult)
|
<commit_before><commit_msg>Add Basic Admin Functionality for Estimators, EvaluationResult and DataSet<commit_after>from django.contrib import admin
from estimators.models import DataSet, Estimator, EvaluationResult
admin.site.register(Estimator)
admin.site.register(DataSet)
admin.site.register(EvaluationResult)
|
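A hedged next step beyond the bare registrations: a ModelAdmin per model adds list columns and search; the field names below are placeholders to swap for real ones, not the app's actual schema:
from django.contrib import admin
from estimators.models import Estimator
class EstimatorAdmin(admin.ModelAdmin):
    list_display = ('__str__',)  # replace with concrete fields once chosen
    search_fields = ()           # fill in searchable fields as needed
# admin.site.register(Estimator, EstimatorAdmin)  # would replace the bare register above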
|
ae1304041915592eb4ef20e3633b97aa30034bd7
|
examples/lvm_thin.py
|
examples/lvm_thin.py
|
import os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new 40 GiB thin pool
pool = b.new_lv(thin_pool=True, size=Size("40 GiB"), parents=[vg])
b.create_device(pool)
# new 20 GiB thin lv
thinlv = b.new_lv(thin_volume=True, size=Size("20 GiB"), parents=[pool],
fmt_type="ext4")
b.create_device(thinlv)
# new snapshot of the thin volume we just created
snap = b.new_lv(name=thinlv.name + "_snapshot", parents=[pool], origin=thinlv,
seg_type="thin")
b.create_device(snap)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
|
Add example for LVM thin provisioning
|
Add example for LVM thin provisioning
|
Python
|
lgpl-2.1
|
vojtechtrefny/blivet,vojtechtrefny/blivet,rvykydal/blivet,rvykydal/blivet
|
Add example for LVM thin provisioning
|
import os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new 40 GiB thin pool
pool = b.new_lv(thin_pool=True, size=Size("40 GiB"), parents=[vg])
b.create_device(pool)
# new 20 GiB thin lv
thinlv = b.new_lv(thin_volume=True, size=Size("20 GiB"), parents=[pool],
fmt_type="ext4")
b.create_device(thinlv)
# new snapshot of the thin volume we just created
snap = b.new_lv(name=thinlv.name + "_snapshot", parents=[pool], origin=thinlv,
seg_type="thin")
b.create_device(snap)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
|
<commit_before><commit_msg>Add example for LVM thin provisioning<commit_after>
|
import os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new 40 GiB thin pool
pool = b.new_lv(thin_pool=True, size=Size("40 GiB"), parents=[vg])
b.create_device(pool)
# new 20 GiB thin lv
thinlv = b.new_lv(thin_volume=True, size=Size("20 GiB"), parents=[pool],
fmt_type="ext4")
b.create_device(thinlv)
# new snapshot of the thin volume we just created
snap = b.new_lv(name=thinlv.name + "_snapshot", parents=[pool], origin=thinlv,
seg_type="thin")
b.create_device(snap)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
|
Add example for LVM thin provisioningimport os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new 40 GiB thin pool
pool = b.new_lv(thin_pool=True, size=Size("40 GiB"), parents=[vg])
b.create_device(pool)
# new 20 GiB thin lv
thinlv = b.new_lv(thin_volume=True, size=Size("20 GiB"), parents=[pool],
fmt_type="ext4")
b.create_device(thinlv)
# new snapshot of the thin volume we just created
snap = b.new_lv(name=thinlv.name + "_snapshot", parents=[pool], origin=thinlv,
seg_type="thin")
b.create_device(snap)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
|
<commit_before><commit_msg>Add example for LVM thin provisioning<commit_after>import os
import blivet
from blivet.size import Size
from blivet.util import set_up_logging, create_sparse_tempfile
set_up_logging()
b = blivet.Blivet() # create an instance of Blivet (don't add system devices)
# create a disk image file on which to create new devices
disk1_file = create_sparse_tempfile("disk1", Size("100GiB"))
b.disk_images["disk1"] = disk1_file
b.reset()
try:
disk1 = b.devicetree.get_device_by_name("disk1")
b.initialize_disk(disk1)
pv = b.new_partition(size=Size("50GiB"), fmt_type="lvmpv")
b.create_device(pv)
# allocate the partitions (decide where and on which disks they'll reside)
blivet.partitioning.do_partitioning(b)
vg = b.new_vg(parents=[pv])
b.create_device(vg)
# new 40 GiB thin pool
pool = b.new_lv(thin_pool=True, size=Size("40 GiB"), parents=[vg])
b.create_device(pool)
# new 20 GiB thin lv
thinlv = b.new_lv(thin_volume=True, size=Size("20 GiB"), parents=[pool],
fmt_type="ext4")
b.create_device(thinlv)
# new snapshot of the thin volume we just created
snap = b.new_lv(name=thinlv.name + "_snapshot", parents=[pool], origin=thinlv,
seg_type="thin")
b.create_device(snap)
# write the new partitions to disk and format them as specified
b.do_it()
print(b.devicetree)
finally:
b.devicetree.teardown_disk_images()
os.unlink(disk1_file)
|
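The try/finally cleanup pattern above can be packaged once; a hedged refactor sketch using only calls already shown in the example (the helper name is hypothetical):
import os
from contextlib import contextmanager
import blivet
from blivet.util import create_sparse_tempfile
@contextmanager
def blivet_disk_image(name, size):
    """Yield a Blivet instance plus the image-backed disk, cleaning up on exit."""
    b = blivet.Blivet()
    path = create_sparse_tempfile(name, size)
    b.disk_images[name] = path
    b.reset()
    try:
        yield b, b.devicetree.get_device_by_name(name)
    finally:
        b.devicetree.teardown_disk_images()
        os.unlink(path)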
|
87acd24224a648407bd0ee538db3c2a1f925a7b1
|
bluebottle/tasks/migrations/0024_auto_20170602_2304.py
|
bluebottle/tasks/migrations/0024_auto_20170602_2304.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-06-02 21:04
from __future__ import unicode_literals
from django.db import migrations
def add_phaselogs_to_old_tasks(apps, schema_editor):
Task = apps.get_model('tasks', 'Task')
TaskStatusLog = apps.get_model('tasks', 'TaskStatusLog')
for task in Task.objects.filter(taskstatuslog__isnull=True):
        TaskStatusLog.objects.create(task=task, status=task.status, start=task.deadline)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('tasks', '0023_merge_20170519_1012'),
]
operations = [
migrations.RunPython(add_phaselogs_to_old_tasks, dummy),
]
|
Fix old tasks for stats too
|
Fix old tasks for stats too
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Fix old tasks for stats too
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-06-02 21:04
from __future__ import unicode_literals
from django.db import migrations
def add_phaselogs_to_old_tasks(apps, schema_editor):
Task = apps.get_model('tasks', 'Task')
TaskStatusLog = apps.get_model('tasks', 'TaskStatusLog')
for task in Task.objects.filter(taskstatuslog__isnull=True):
        TaskStatusLog.objects.create(task=task, status=task.status, start=task.deadline)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('tasks', '0023_merge_20170519_1012'),
]
operations = [
migrations.RunPython(add_phaselogs_to_old_tasks, dummy),
]
|
<commit_before><commit_msg>Fix old tasks for stats too<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-06-02 21:04
from __future__ import unicode_literals
from django.db import migrations
def add_phaselogs_to_old_tasks(apps, schema_editor):
Task = apps.get_model('tasks', 'Task')
TaskStatusLog = apps.get_model('tasks', 'TaskStatusLog')
for task in Task.objects.filter(taskstatuslog__isnull=True):
        TaskStatusLog.objects.create(task=task, status=task.status, start=task.deadline)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('tasks', '0023_merge_20170519_1012'),
]
operations = [
migrations.RunPython(add_phaselogs_to_old_tasks, dummy),
]
|
Fix old tasks for stats too# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-06-02 21:04
from __future__ import unicode_literals
from django.db import migrations
def add_phaselogs_to_old_tasks(apps, schema_editor):
Task = apps.get_model('tasks', 'Task')
TaskStatusLog = apps.get_model('tasks', 'TaskStatusLog')
for task in Task.objects.filter(taskstatuslog__isnull=True):
        TaskStatusLog.objects.create(task=task, status=task.status, start=task.deadline)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('tasks', '0023_merge_20170519_1012'),
]
operations = [
migrations.RunPython(add_phaselogs_to_old_tasks, dummy),
]
|
<commit_before><commit_msg>Fix old tasks for stats too<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-06-02 21:04
from __future__ import unicode_literals
from django.db import migrations
def add_phaselogs_to_old_tasks(apps, schema_editor):
Task = apps.get_model('tasks', 'Task')
TaskStatusLog = apps.get_model('tasks', 'TaskStatusLog')
for task in Task.objects.filter(taskstatuslog__isnull=True):
        TaskStatusLog.objects.create(task=task, status=task.status, start=task.deadline)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('tasks', '0023_merge_20170519_1012'),
]
operations = [
migrations.RunPython(add_phaselogs_to_old_tasks, dummy),
]
|
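A hedged alternative for the backfill: bulk_create collapses the per-task INSERTs into a single query (reasonable here since historical models skip signals anyway; assumes no per-row save logic is needed):
def add_phaselogs_to_old_tasks(apps, schema_editor):
    Task = apps.get_model('tasks', 'Task')
    TaskStatusLog = apps.get_model('tasks', 'TaskStatusLog')
    TaskStatusLog.objects.bulk_create([
        TaskStatusLog(task=task, status=task.status, start=task.deadline)
        for task in Task.objects.filter(taskstatuslog__isnull=True)])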
|
dcfa7bfa11bea86d831959a217b558d704ece078
|
ensemble/ctf/tests/test_manager.py
|
ensemble/ctf/tests/test_manager.py
|
from contextlib import contextmanager
from os.path import isfile, join
import shutil
import tempfile
from numpy.testing import assert_allclose
from ensemble.ctf.editor import ALPHA_DEFAULT, COLOR_DEFAULT, create_function
from ensemble.ctf.manager import CTF_EXTENSION, CtfManager
@contextmanager
def temp_directory():
tempdir = tempfile.mkdtemp(suffix='', prefix='tmp', dir=None)
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def sample_function_parts():
return create_function(COLOR_DEFAULT), create_function(ALPHA_DEFAULT)
def test_ctf_manager_add():
name = 'test_function'
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add(name, color_func, alpha_func)
assert isfile(join(root_dir, name + CTF_EXTENSION))
def test_ctf_manager_get():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
ret_color, ret_alpha = manager.get('test')
assert_allclose(ret_color.values(), COLOR_DEFAULT)
assert_allclose(ret_alpha.values(), ALPHA_DEFAULT)
def test_ctf_manager_load():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
del manager
manager = CtfManager.from_directory(root_dir)
manager.get('test')
|
Add unit tests for CtfManager.
|
Add unit tests for CtfManager.
|
Python
|
bsd-3-clause
|
dmsurti/ensemble
|
Add unit tests for CtfManager.
|
from contextlib import contextmanager
from os.path import isfile, join
import shutil
import tempfile
from numpy.testing import assert_allclose
from ensemble.ctf.editor import ALPHA_DEFAULT, COLOR_DEFAULT, create_function
from ensemble.ctf.manager import CTF_EXTENSION, CtfManager
@contextmanager
def temp_directory():
tempdir = tempfile.mkdtemp(suffix='', prefix='tmp', dir=None)
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def sample_function_parts():
return create_function(COLOR_DEFAULT), create_function(ALPHA_DEFAULT)
def test_ctf_manager_add():
name = 'test_function'
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add(name, color_func, alpha_func)
assert isfile(join(root_dir, name + CTF_EXTENSION))
def test_ctf_manager_get():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
ret_color, ret_alpha = manager.get('test')
assert_allclose(ret_color.values(), COLOR_DEFAULT)
assert_allclose(ret_alpha.values(), ALPHA_DEFAULT)
def test_ctf_manager_load():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
del manager
manager = CtfManager.from_directory(root_dir)
manager.get('test')
|
<commit_before><commit_msg>Add unit tests for CtfManager.<commit_after>
|
from contextlib import contextmanager
from os.path import isfile, join
import shutil
import tempfile
from numpy.testing import assert_allclose
from ensemble.ctf.editor import ALPHA_DEFAULT, COLOR_DEFAULT, create_function
from ensemble.ctf.manager import CTF_EXTENSION, CtfManager
@contextmanager
def temp_directory():
tempdir = tempfile.mkdtemp(suffix='', prefix='tmp', dir=None)
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def sample_function_parts():
return create_function(COLOR_DEFAULT), create_function(ALPHA_DEFAULT)
def test_ctf_manager_add():
name = 'test_function'
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add(name, color_func, alpha_func)
assert isfile(join(root_dir, name + CTF_EXTENSION))
def test_ctf_manager_get():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
ret_color, ret_alpha = manager.get('test')
assert_allclose(ret_color.values(), COLOR_DEFAULT)
assert_allclose(ret_alpha.values(), ALPHA_DEFAULT)
def test_ctf_manager_load():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
del manager
manager = CtfManager.from_directory(root_dir)
manager.get('test')
|
Add unit tests for CtfManager.from contextlib import contextmanager
from os.path import isfile, join
import shutil
import tempfile
from numpy.testing import assert_allclose
from ensemble.ctf.editor import ALPHA_DEFAULT, COLOR_DEFAULT, create_function
from ensemble.ctf.manager import CTF_EXTENSION, CtfManager
@contextmanager
def temp_directory():
tempdir = tempfile.mkdtemp(suffix='', prefix='tmp', dir=None)
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def sample_function_parts():
return create_function(COLOR_DEFAULT), create_function(ALPHA_DEFAULT)
def test_ctf_manager_add():
name = 'test_function'
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add(name, color_func, alpha_func)
assert isfile(join(root_dir, name + CTF_EXTENSION))
def test_ctf_manager_get():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
ret_color, ret_alpha = manager.get('test')
assert_allclose(ret_color.values(), COLOR_DEFAULT)
assert_allclose(ret_alpha.values(), ALPHA_DEFAULT)
def test_ctf_manager_load():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
del manager
manager = CtfManager.from_directory(root_dir)
manager.get('test')
|
<commit_before><commit_msg>Add unit tests for CtfManager.<commit_after>from contextlib import contextmanager
from os.path import isfile, join
import shutil
import tempfile
from numpy.testing import assert_allclose
from ensemble.ctf.editor import ALPHA_DEFAULT, COLOR_DEFAULT, create_function
from ensemble.ctf.manager import CTF_EXTENSION, CtfManager
@contextmanager
def temp_directory():
tempdir = tempfile.mkdtemp(suffix='', prefix='tmp', dir=None)
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
def sample_function_parts():
return create_function(COLOR_DEFAULT), create_function(ALPHA_DEFAULT)
def test_ctf_manager_add():
name = 'test_function'
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add(name, color_func, alpha_func)
assert isfile(join(root_dir, name + CTF_EXTENSION))
def test_ctf_manager_get():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
ret_color, ret_alpha = manager.get('test')
assert_allclose(ret_color.values(), COLOR_DEFAULT)
assert_allclose(ret_alpha.values(), ALPHA_DEFAULT)
def test_ctf_manager_load():
color_func, alpha_func = sample_function_parts()
with temp_directory() as root_dir:
manager = CtfManager.from_directory(root_dir)
manager.add('test', color_func, alpha_func)
del manager
manager = CtfManager.from_directory(root_dir)
manager.get('test')
|
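The hand-rolled temp_directory helper mirrors a stdlib tool; on Python 3.2+ the tests could use it directly (a hedged drop-in, not a claim about the project's supported versions):
import tempfile
with tempfile.TemporaryDirectory(prefix='tmp') as root_dir:
    manager = CtfManager.from_directory(root_dir)  # tree is removed on exit, even on error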
|
6b148c2fb003c46d2cf1eec6b81a4720bc3adcd6
|
src/web/views/api/v1/cve.py
|
src/web/views/api/v1/cve.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'])
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
def pre_get_many(search_params=None, **kw):
order_by = [{"field":"published_at", "direction":"desc"}]
if 'order_by' not in search_params:
search_params['order_by'] = []
search_params['order_by'].extend(order_by)
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'],
preprocessors=dict(
GET_MANY=[pre_get_many]))
|
Sort CVE by published_at attribute.
|
[API] Sort CVE by published_at attribute.
|
Python
|
agpl-3.0
|
cedricbonhomme/services,cedricbonhomme/services,cedricbonhomme/services,cedricbonhomme/services
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'])
[API] Sort CVE by published_at attribute.
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
def pre_get_many(search_params=None, **kw):
order_by = [{"field":"published_at", "direction":"desc"}]
if 'order_by' not in search_params:
search_params['order_by'] = []
search_params['order_by'].extend(order_by)
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'],
preprocessors=dict(
GET_MANY=[pre_get_many]))
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'])
<commit_msg>[API] Sort CVE by published_at attribute.<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
def pre_get_many(search_params=None, **kw):
order_by = [{"field":"published_at", "direction":"desc"}]
if 'order_by' not in search_params:
search_params['order_by'] = []
search_params['order_by'].extend(order_by)
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'],
preprocessors=dict(
GET_MANY=[pre_get_many]))
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'])
[API] Sort CVE by published_at attribute.#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
def pre_get_many(search_params=None, **kw):
order_by = [{"field":"published_at", "direction":"desc"}]
if 'order_by' not in search_params:
search_params['order_by'] = []
search_params['order_by'].extend(order_by)
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'],
preprocessors=dict(
GET_MANY=[pre_get_many]))
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'])
<commit_msg>[API] Sort CVE by published_at attribute.<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Freshermeat - An open source software directory and release tracker.
# Copyright (C) 2017-2019 Cédric Bonhomme - https://www.cedricbonhomme.org
#
# For more information : https://gitlab.com/cedric/Freshermeat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from bootstrap import manager
from web import models
from web.views.api.v1 import processors
from web.views.api.v1.common import url_prefix
def pre_get_many(search_params=None, **kw):
order_by = [{"field":"published_at", "direction":"desc"}]
if 'order_by' not in search_params:
search_params['order_by'] = []
search_params['order_by'].extend(order_by)
blueprint_cve = manager.create_api_blueprint(
models.CVE,
url_prefix=url_prefix,
methods=['GET'],
preprocessors=dict(
GET_MANY=[pre_get_many]))
|
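Editor's note: the GET_MANY hook above works because Flask-Restless passes the mutable search_params dict into each preprocessor, so appending to its 'order_by' list changes the query. A hedged sketch of a reusable variant (the factory name and defaults are this editor's invention, not part of the commit):
def default_sort(field, direction="desc"):
    # Build a GET_MANY preprocessor that appends a default sort order.
    def pre_get_many(search_params=None, **kw):
        order_by = [{"field": field, "direction": direction}]
        if 'order_by' not in search_params:
            search_params['order_by'] = []
        search_params['order_by'].extend(order_by)
    return pre_get_many
# Usage: preprocessors=dict(GET_MANY=[default_sort("published_at")])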
143e76eaf220e5200150653627642dc2bc3a645e
|
examples/network_correlations.py
|
examples/network_correlations.py
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
|
Add correlation matrix heatmap example
|
Add correlation matrix heatmap example
|
Python
|
bsd-3-clause
|
oesteban/seaborn,gef756/seaborn,arokem/seaborn,anntzer/seaborn,drewokane/seaborn,ischwabacher/seaborn,phobson/seaborn,ebothmann/seaborn,cwu2011/seaborn,sinhrks/seaborn,q1ang/seaborn,aashish24/seaborn,ashhher3/seaborn,dimarkov/seaborn,lypzln/seaborn,mwaskom/seaborn,JWarmenhoven/seaborn,olgabot/seaborn,dotsdl/seaborn,lukauskas/seaborn,phobson/seaborn,clarkfitzg/seaborn,dhimmel/seaborn,mclevey/seaborn,petebachant/seaborn,nileracecrew/seaborn,aashish24/seaborn,lukauskas/seaborn,uhjish/seaborn,kyleam/seaborn,bsipocz/seaborn,wrobstory/seaborn,mwaskom/seaborn,muku42/seaborn,sauliusl/seaborn,Lx37/seaborn,jakevdp/seaborn,Guokr1991/seaborn,parantapa/seaborn,tim777z/seaborn,anntzer/seaborn,jat255/seaborn,arokem/seaborn,huongttlan/seaborn,mia1rab/seaborn
|
Add correlation matrix heatmap example
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
|
<commit_before><commit_msg>Add correlation matrix heatmap example<commit_after>
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
|
Add correlation matrix heatmap example"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
|
<commit_before><commit_msg>Add correlation matrix heatmap example<commit_after>"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
|
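Editor's note: the Rectangle loop above draws one outlined block per network along the heatmap's diagonal; it appears to rely on the axes' y-limits being (0, n_rows) so the first block lands in the top-left corner. A standalone geometry sketch with made-up cluster sizes:
import matplotlib.pyplot as plt
fig, ax = plt.subplots()
ax.set_xlim(0, 10)
ax.set_ylim(0, 10)
start, end = ax.get_ylim()
for n_nodes in [3, 4, 3]:  # hypothetical cluster sizes
    ax.add_artist(plt.Rectangle((start, end), n_nodes, -n_nodes,
                                facecolor="none", edgecolor=".2"))
    start += n_nodes
    end -= n_nodes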
|
cf09d16ca8c6852f39e9cc347e4d726a79d6c1cf
|
examples/python/helloworld/greeter_client_with_options.py
|
examples/python/helloworld/greeter_client_with_options.py
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
channel = grpc.insecure_channel(
target='localhost:50051',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10),
('grpc.max_receive_message_length', 12)])
stub = helloworld_pb2_grpc.GreeterStub(channel)
try:
# synchronous rpc call
stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
except Exception as err:
print('Raised by max_receive_message_length option\n' + str(err))
try:
# asynchronous rpc call. timeout in second
future = stub.SayHello.future(helloworld_pb2.HelloRequest(name='me'), timeout=1)
response = future.result()
print("Greeter client received: " + response.message)
finally:
channel.close()
if __name__ == '__main__':
run()
|
Add python example - channel options
|
Add python example - channel options
|
Python
|
apache-2.0
|
ejona86/grpc,grpc/grpc,carl-mastrangelo/grpc,ejona86/grpc,grpc/grpc,mehrdada/grpc,carl-mastrangelo/grpc,vjpai/grpc,carl-mastrangelo/grpc,ctiller/grpc,ejona86/grpc,donnadionne/grpc,grpc/grpc,nicolasnoble/grpc,vjpai/grpc,stanley-cheung/grpc,nicolasnoble/grpc,donnadionne/grpc,pszemus/grpc,muxi/grpc,vjpai/grpc,pszemus/grpc,stanley-cheung/grpc,carl-mastrangelo/grpc,jboeuf/grpc,firebase/grpc,jboeuf/grpc,nicolasnoble/grpc,ejona86/grpc,mehrdada/grpc,carl-mastrangelo/grpc,pszemus/grpc,stanley-cheung/grpc,muxi/grpc,firebase/grpc,pszemus/grpc,jtattermusch/grpc,ctiller/grpc,vjpai/grpc,nicolasnoble/grpc,muxi/grpc,jboeuf/grpc,donnadionne/grpc,donnadionne/grpc,stanley-cheung/grpc,firebase/grpc,grpc/grpc,donnadionne/grpc,muxi/grpc,pszemus/grpc,jboeuf/grpc,jtattermusch/grpc,mehrdada/grpc,vjpai/grpc,grpc/grpc,ctiller/grpc,carl-mastrangelo/grpc,sreecha/grpc,firebase/grpc,pszemus/grpc,grpc/grpc,pszemus/grpc,jtattermusch/grpc,muxi/grpc,jtattermusch/grpc,vjpai/grpc,jtattermusch/grpc,muxi/grpc,ctiller/grpc,jtattermusch/grpc,mehrdada/grpc,jboeuf/grpc,muxi/grpc,grpc/grpc,stanley-cheung/grpc,pszemus/grpc,nicolasnoble/grpc,jboeuf/grpc,mehrdada/grpc,vjpai/grpc,sreecha/grpc,nicolasnoble/grpc,donnadionne/grpc,carl-mastrangelo/grpc,ejona86/grpc,nicolasnoble/grpc,donnadionne/grpc,ctiller/grpc,donnadionne/grpc,grpc/grpc,carl-mastrangelo/grpc,pszemus/grpc,firebase/grpc,ejona86/grpc,sreecha/grpc,mehrdada/grpc,stanley-cheung/grpc,firebase/grpc,ctiller/grpc,carl-mastrangelo/grpc,muxi/grpc,pszemus/grpc,vjpai/grpc,muxi/grpc,ctiller/grpc,jboeuf/grpc,carl-mastrangelo/grpc,mehrdada/grpc,jboeuf/grpc,donnadionne/grpc,ctiller/grpc,jboeuf/grpc,vjpai/grpc,grpc/grpc,sreecha/grpc,grpc/grpc,pszemus/grpc,ctiller/grpc,ejona86/grpc,nicolasnoble/grpc,muxi/grpc,ejona86/grpc,firebase/grpc,ctiller/grpc,jboeuf/grpc,muxi/grpc,mehrdada/grpc,vjpai/grpc,jboeuf/grpc,stanley-cheung/grpc,stanley-cheung/grpc,nicolasnoble/grpc,sreecha/grpc,jtattermusch/grpc,nicolasnoble/grpc,grpc/grpc,pszemus/grpc,ejona86/grpc,ejona86/grpc,stanley-cheung/grpc,ctiller/grpc,carl-mastrangelo/grpc,ejona86/grpc,jboeuf/grpc,sreecha/grpc,firebase/grpc,grpc/grpc,stanley-cheung/grpc,nicolasnoble/grpc,stanley-cheung/grpc,jtattermusch/grpc,donnadionne/grpc,mehrdada/grpc,vjpai/grpc,ctiller/grpc,sreecha/grpc,nicolasnoble/grpc,sreecha/grpc,firebase/grpc,sreecha/grpc,sreecha/grpc,vjpai/grpc,ejona86/grpc,firebase/grpc,muxi/grpc,stanley-cheung/grpc,jtattermusch/grpc,sreecha/grpc,jtattermusch/grpc,firebase/grpc,mehrdada/grpc,mehrdada/grpc,donnadionne/grpc,donnadionne/grpc,jtattermusch/grpc,sreecha/grpc,jtattermusch/grpc,firebase/grpc,mehrdada/grpc,carl-mastrangelo/grpc
|
Add python example - channel options
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
channel = grpc.insecure_channel(
target='localhost:50051',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10),
('grpc.max_receive_message_length', 12)])
stub = helloworld_pb2_grpc.GreeterStub(channel)
try:
# synchronous rpc call
stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
except Exception as err:
print('Raised by max_receive_message_length option\n' + str(err))
try:
# asynchronous rpc call. timeout in second
future = stub.SayHello.future(helloworld_pb2.HelloRequest(name='me'), timeout=1)
response = future.result()
print("Greeter client received: " + response.message)
finally:
channel.close()
if __name__ == '__main__':
run()
|
<commit_before><commit_msg>Add python example - channel options<commit_after>
|
# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
channel = grpc.insecure_channel(
target='localhost:50051',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10),
('grpc.max_receive_message_length', 12)])
stub = helloworld_pb2_grpc.GreeterStub(channel)
try:
# synchronous rpc call
stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
except Exception as err:
print('Raised by max_receive_message_length option\n' + str(err))
try:
# asynchronous rpc call. timeout in second
future = stub.SayHello.future(helloworld_pb2.HelloRequest(name='me'), timeout=1)
response = future.result()
print("Greeter client received: " + response.message)
finally:
channel.close()
if __name__ == '__main__':
run()
|
Add python example - channel options# Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
channel = grpc.insecure_channel(
target='localhost:50051',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10),
('grpc.max_receive_message_length', 12)])
stub = helloworld_pb2_grpc.GreeterStub(channel)
try:
# synchronous rpc call
stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
except Exception as err:
print('Raised by max_receive_message_length option\n' + str(err))
try:
# asynchronous rpc call. timeout in second
future = stub.SayHello.future(helloworld_pb2.HelloRequest(name='me'), timeout=1)
response = future.result()
print("Greeter client received: " + response.message)
finally:
channel.close()
if __name__ == '__main__':
run()
|
<commit_before><commit_msg>Add python example - channel options<commit_after># Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
def run():
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
channel = grpc.insecure_channel(
target='localhost:50051',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10),
('grpc.max_receive_message_length', 12)])
stub = helloworld_pb2_grpc.GreeterStub(channel)
try:
# synchronous rpc call
stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
except Exception as err:
print('Raised by max_receive_message_length option\n' + str(err))
try:
# asynchronous rpc call. timeout in second
future = stub.SayHello.future(helloworld_pb2.HelloRequest(name='me'), timeout=1)
response = future.result()
print("Greeter client received: " + response.message)
finally:
channel.close()
if __name__ == '__main__':
run()
|
|
36d597c38537e68b648fb9c85e8ec448e324c41e
|
mrequests/examples/get_save_file.py
|
mrequests/examples/get_save_file.py
|
import mrequests
host = 'http://httpbin.org/'
#host = "http://localhost/"
url = host + "image"
filename = "image.png"
r = mrequests.get(url, headers={b"accept": b"image/png"})
if r.status_code == 200:
r.save(filename)
print("Image saved to '{}'.".format(filename))
else:
print("Request failed. Status: {}".format(r.status_code))
r.close()
|
Add example for saving request data to file
|
Add example for saving request data to file
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de>
|
Python
|
mit
|
SpotlightKid/micropython-stm-lib
|
Add example for saving request data to file
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de>
|
import mrequests
host = 'http://httpbin.org/'
#host = "http://localhost/"
url = host + "image"
filename = "image.png"
r = mrequests.get(url, headers={b"accept": b"image/png"})
if r.status_code == 200:
r.save(filename)
print("Image saved to '{}'.".format(filename))
else:
print("Request failed. Status: {}".format(r.status_code))
r.close()
|
<commit_before><commit_msg>Add example for saving request data to file
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de><commit_after>
|
import mrequests
host = 'http://httpbin.org/'
#host = "http://localhost/"
url = host + "image"
filename = "image.png"
r = mrequests.get(url, headers={b"accept": b"image/png"})
if r.status_code == 200:
r.save(filename)
print("Image saved to '{}'.".format(filename))
else:
print("Request failed. Status: {}".format(r.status_code))
r.close()
|
Add example for saving request data to file
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de>import mrequests
host = 'http://httpbin.org/'
#host = "http://localhost/"
url = host + "image"
filename = "image.png"
r = mrequests.get(url, headers={b"accept": b"image/png"})
if r.status_code == 200:
r.save(filename)
print("Image saved to '{}'.".format(filename))
else:
print("Request failed. Status: {}".format(r.status_code))
r.close()
|
<commit_before><commit_msg>Add example for saving request data to file
Signed-off-by: Christopher Arndt <711c73f64afdce07b7e38039a96d2224209e9a6c@chrisarndt.de><commit_after>import mrequests
host = 'http://httpbin.org/'
#host = "http://localhost/"
url = host + "image"
filename = "image.png"
r = mrequests.get(url, headers={b"accept": b"image/png"})
if r.status_code == 200:
r.save(filename)
print("Image saved to '{}'.".format(filename))
else:
print("Request failed. Status: {}".format(r.status_code))
r.close()
|
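Editor's note: mrequests targets MicroPython, where Response.save() writes the body out to storage. For readers on CPython, a rough equivalent using the requests library (an analogy, not part of this row) would be:
import requests
r = requests.get("http://httpbin.org/image",
                 headers={"accept": "image/png"}, stream=True)
if r.status_code == 200:
    with open("image.png", "wb") as f:
        for chunk in r.iter_content(8192):  # stream in 8 KiB chunks
            f.write(chunk)
r.close()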
|
081612c1a07422cfa5292482c409e46346d15dc5
|
domm/tests/test_crossref.py
|
domm/tests/test_crossref.py
|
##############################################################################
# Name: test_crossref.py
# Purpose: Test for verifying output the semantic verification of cross-refs
# Author: Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# Copyright: (c) 2014 Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# License: MIT License
##############################################################################
from domm.parser import DommParser
from domm.metamodel import *
def test_model():
pass
|
Add test stub for crossref
|
Add test stub for crossref
|
Python
|
mit
|
Ygg01/master
|
Add test stub for crossref
|
##############################################################################
# Name: test_crossref.py
# Purpose: Test for verifying output the semantic verification of cross-refs
# Author: Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# Copyright: (c) 2014 Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# License: MIT License
##############################################################################
from domm.parser import DommParser
from domm.metamodel import *
def test_model():
pass
|
<commit_before><commit_msg>Add test stub for crossref<commit_after>
|
##############################################################################
# Name: test_crossref.py
# Purpose: Test for verifying output the semantic verification of cross-refs
# Author: Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# Copyright: (c) 2014 Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# License: MIT License
##############################################################################
from domm.parser import DommParser
from domm.metamodel import *
def test_model():
pass
|
Add test stub for crossref##############################################################################
# Name: test_crossref.py
# Purpose: Test for verifying output the semantic verification of cross-refs
# Author: Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# Copyright: (c) 2014 Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# License: MIT License
##############################################################################
from domm.parser import DommParser
from domm.metamodel import *
def test_model():
pass
|
<commit_before><commit_msg>Add test stub for crossref<commit_after>##############################################################################
# Name: test_crossref.py
# Purpose: Test for verifying output the semantic verification of cross-refs
# Author: Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# Copyright: (c) 2014 Daniel Fath <daniel DOT fath7 AT gmail DOT com>
# License: MIT License
##############################################################################
from domm.parser import DommParser
from domm.metamodel import *
def test_model():
pass
|
|
2204ff556bf6257dcfeb39447b04607729d6b4d2
|
py/counting-bits.py
|
py/counting-bits.py
|
class Solution(object):
def count_bits(self, n):
c = (n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111))
return ((c + (c >> 3)) & 0o30707070707) % 63
def countBits(self, num):
"""
:type num: int
:rtype: List[int]
"""
return map(self.count_bits, xrange(num + 1))
|
Add py solution for 338. Counting Bits
|
Add py solution for 338. Counting Bits
338. Counting Bits: https://leetcode.com/problems/counting-bits/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 338. Counting Bits
338. Counting Bits: https://leetcode.com/problems/counting-bits/
|
class Solution(object):
def count_bits(self, n):
c = (n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111))
return ((c + (c >> 3)) & 0o30707070707) % 63
def countBits(self, num):
"""
:type num: int
:rtype: List[int]
"""
return map(self.count_bits, xrange(num + 1))
|
<commit_before><commit_msg>Add py solution for 338. Counting Bits
338. Counting Bits: https://leetcode.com/problems/counting-bits/<commit_after>
|
class Solution(object):
def count_bits(self, n):
c = (n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111))
return ((c + (c >> 3)) & 0o30707070707) % 63
def countBits(self, num):
"""
:type num: int
:rtype: List[int]
"""
return map(self.count_bits, xrange(num + 1))
|
Add py solution for 338. Counting Bits
338. Counting Bits: https://leetcode.com/problems/counting-bits/class Solution(object):
def count_bits(self, n):
c = (n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111))
return ((c + (c >> 3)) & 0o30707070707) % 63
def countBits(self, num):
"""
:type num: int
:rtype: List[int]
"""
return map(self.count_bits, xrange(num + 1))
|
<commit_before><commit_msg>Add py solution for 338. Counting Bits
338. Counting Bits: https://leetcode.com/problems/counting-bits/<commit_after>class Solution(object):
def count_bits(self, n):
c = (n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111))
return ((c + (c >> 3)) & 0o30707070707) % 63
def countBits(self, num):
"""
:type num: int
:rtype: List[int]
"""
return map(self.count_bits, xrange(num + 1))
|
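Editor's note: the octal constants above are a HAKMEM-style population count, terse enough to deserve a gloss (the explanation is this editor's, not the submitter's). Each octal digit spans 3 bits, so the two subtractions leave each 3-bit group holding its own bit count; (c + (c >> 3)) & 0o30707070707 merges neighbouring groups into 6-bit sums, and % 63 adds those base-64 digits together, since a number is congruent to its base-64 digit sum modulo 63:
def popcount_hakmem(n):
    # works for 32-bit non-negative n
    c = n - ((n >> 1) & 0o33333333333) - ((n >> 2) & 0o11111111111)
    return ((c + (c >> 3)) & 0o30707070707) % 63
assert [popcount_hakmem(i) for i in range(8)] == [0, 1, 1, 2, 1, 2, 2, 3]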
|
b6920055ca0498bc621da51e5296a001415166dd
|
tests/test_Cubie.py
|
tests/test_Cubie.py
|
import src.Cubie as Cubie
import unittest
class TestSticker(unittest.TestCase):
def test_init(self):
not_allowed_chars = 'acdefhijklmnpqstuvxz'
allowed_chars = 'rgbywo.'
for c in not_allowed_chars:
self.assertRaises(ValueError, Cubie.Sticker, c)
for c in allowed_chars:
s = Cubie.Sticker(c)
self.assertEqual(s.color, c)
if __name__ == '__main__':
unittest.main()
|
Rename tests file to test_*
|
Rename tests file to test_*
|
Python
|
mit
|
Wiston999/python-rubik
|
Rename tests file to test_*
|
import src.Cubie as Cubie
import unittest
class TestSticker(unittest.TestCase):
def test_init(self):
not_allowed_chars = 'acdefhijklmnpqstuvxz'
allowed_chars = 'rgbywo.'
for c in not_allowed_chars:
self.assertRaises(ValueError, Cubie.Sticker, c)
for c in allowed_chars:
s = Cubie.Sticker(c)
self.assertEqual(s.color, c)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Rename tests file to test_*<commit_after>
|
import src.Cubie as Cubie
import unittest
class TestSticker(unittest.TestCase):
def test_init(self):
not_allowed_chars = 'acdefhijklmnpqstuvxz'
allowed_chars = 'rgbywo.'
for c in not_allowed_chars:
self.assertRaises(ValueError, Cubie.Sticker, c)
for c in allowed_chars:
s = Cubie.Sticker(c)
self.assertEqual(s.color, c)
if __name__ == '__main__':
unittest.main()
|
Rename tests file to test_*import src.Cubie as Cubie
import unittest
class TestSticker(unittest.TestCase):
def test_init(self):
not_allowed_chars = 'acdefhijklmnpqstuvxz'
allowed_chars = 'rgbywo.'
for c in not_allowed_chars:
self.assertRaises(ValueError, Cubie.Sticker, c)
for c in allowed_chars:
s = Cubie.Sticker(c)
self.assertEqual(s.color, c)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Rename tests file to test_*<commit_after>import src.Cubie as Cubie
import unittest
class TestSticker(unittest.TestCase):
def test_init(self):
not_allowed_chars = 'acdefhijklmnpqstuvxz'
allowed_chars = 'rgbywo.'
for c in not_allowed_chars:
self.assertRaises(ValueError, Cubie.Sticker, c)
for c in allowed_chars:
s = Cubie.Sticker(c)
self.assertEqual(s.color, c)
if __name__ == '__main__':
unittest.main()
|
|
315858ae41722442668e289f2c1492a591526f0f
|
chmvh_website/common/tests/templatetags/test_phone_number_tag.py
|
chmvh_website/common/tests/templatetags/test_phone_number_tag.py
|
from common.templatetags.common import phone_number
class TestPhoneNumberTag(object):
"""Test cases for the phone number tag"""
def test_multi_whitespace(self):
"""Test passing in a string with lots of whitespace.
Whitespace more than 1 character wide should be condensed down
to one non-breaking space.
"""
        num = '(555)    555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
def test_standard_number(self):
"""Test passing in a standard 10 digit phone number.
The output should replace whitespace with non-breaking spaces.
"""
num = '(555) 555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
|
Add tests for phone number template tag.
|
Add tests for phone number template tag.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
Add tests for phone number template tag.
|
from common.templatetags.common import phone_number
class TestPhoneNumberTag(object):
"""Test cases for the phone number tag"""
def test_multi_whitespace(self):
"""Test passing in a string with lots of whitespace.
Whitespace more than 1 character wide should be condensed down
to one non-breaking space.
"""
        num = '(555)    555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
def test_standard_number(self):
"""Test passing in a standard 10 digit phone number.
The output should replace whitespace with non-breaking spaces.
"""
num = '(555) 555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
|
<commit_before><commit_msg>Add tests for phone number template tag.<commit_after>
|
from common.templatetags.common import phone_number
class TestPhoneNumberTag(object):
"""Test cases for the phone number tag"""
def test_multi_whitespace(self):
"""Test passing in a string with lots of whitespace.
Whitespace more than 1 character wide should be condensed down
to one non-breaking space.
"""
        num = '(555)    555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
def test_standard_number(self):
"""Test passing in a standard 10 digit phone number.
The output should replace whitespace with non-breaking spaces.
"""
num = '(555) 555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
|
Add tests for phone number template tag.from common.templatetags.common import phone_number
class TestPhoneNumberTag(object):
"""Test cases for the phone number tag"""
def test_multi_whitespace(self):
"""Test passing in a string with lots of whitespace.
Whitespace more than 1 character wide should be condensed down
to one non-breaking space.
"""
        num = '(555)    555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
def test_standard_number(self):
"""Test passing in a standard 10 digit phone number.
The output should replace whitespace with non-breaking spaces.
"""
num = '(555) 555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
|
<commit_before><commit_msg>Add tests for phone number template tag.<commit_after>from common.templatetags.common import phone_number
class TestPhoneNumberTag(object):
"""Test cases for the phone number tag"""
def test_multi_whitespace(self):
"""Test passing in a string with lots of whitespace.
Whitespace more than 1 character wide should be condensed down
to one non-breaking space.
"""
        num = '(555)    555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
def test_standard_number(self):
"""Test passing in a standard 10 digit phone number.
The output should replace whitespace with non-breaking spaces.
"""
num = '(555) 555-5555'
out = phone_number(num)
assert out == '(555) 555-5555'
|
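Editor's note: the implementation under test is not part of this row; a filter consistent with these docstrings might collapse whitespace runs into a single non-breaking space roughly like this (a guess at the intent, with a hypothetical module layout):
import re
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter
def phone_number(value):
    # Collapse any run of whitespace into one non-breaking space so the
    # number never line-wraps in rendered HTML.
    return mark_safe(re.sub(r'\s+', '\xa0', value))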
|
3a82bf4d0597c79a41029004b1a5622ad70268a9
|
all_builds.py
|
all_builds.py
|
#!/usr/bin/python
import subprocess
import sys
def RunCommand(command):
run = subprocess.Popen(command, shell=True)
output = run.communicate()
if run.returncode:
print "Non-zero return code: " + str(run.returncode) + " => exiting!"
sys.exit(1)
def list_of_experiments():
experiments = []
configure_file = open("configure")
list_start = False
for line in configure_file.read().split("\n"):
if line == 'EXPERIMENT_LIST="':
list_start = True
elif line == '"':
list_start = False
elif list_start:
currently_broken = ["csm"]
experiment = line[4:]
if experiment not in currently_broken:
experiments.append(experiment)
return experiments
def main():
base_command = "./configure --enable-internal-stats"
test_build(base_command)
for experiment_name in list_of_experiments():
test_build("%s --enable-experimental --enable-%s" % (base_command,
experiment_name))
def test_build(configure_command):
print "\033[34m\033[47mTesting %s\033[0m" % (configure_command)
RunCommand(configure_command)
RunCommand("make clean")
RunCommand("make")
if __name__ == "__main__":
main()
|
Add script to test all builds.
|
Add script to test all builds.
Change-Id: I6bbed8bcb2dfa3458ffc59179dfba66c92e18125
|
Python
|
bsd-3-clause
|
jmvalin/aom,ittiamvpx/libvpx,smarter/aom,shacklettbp/aom,mwgoldsmith/libvpx,Laknot/libvpx,altogother/webm.libvpx,Distrotech/libvpx,altogother/webm.libvpx,running770/libvpx,hsueceumd/test_hui,matanbs/vp982,Topopiccione/libvpx,zofuthan/libvpx,Laknot/libvpx,reimaginemedia/webm.libvpx,webmproject/libvpx,shareefalis/libvpx,kleopatra999/webm.libvpx,hsueceumd/test_hui,altogother/webm.libvpx,mbebenita/aom,jmvalin/aom,charup/https---github.com-webmproject-libvpx-,Suvarna1488/webm.libvpx,liqianggao/libvpx,luctrudeau/aom,WebRTC-Labs/libvpx,matanbs/vp982,Laknot/libvpx,liqianggao/libvpx,mwgoldsmith/libvpx,hsueceumd/test_hui,jmvalin/aom,vasilvv/esvp8,hsueceumd/test_hui,mbebenita/aom,turbulenz/libvpx,shareefalis/libvpx,mbebenita/aom,turbulenz/libvpx,mbebenita/aom,Suvarna1488/webm.libvpx,luctrudeau/aom,kalli123/webm.libvpx,ShiftMediaProject/libvpx,mbebenita/aom,sanyaade-teachings/libvpx,gshORTON/webm.libvpx,shareefalis/libvpx,GrokImageCompression/aom,ittiamvpx/libvpx,ittiamvpx/libvpx,smarter/aom,openpeer/libvpx_new,VTCSecureLLC/libvpx,mbebenita/aom,openpeer/libvpx_new,Suvarna1488/webm.libvpx,jacklicn/webm.libvpx,ittiamvpx/libvpx-1,charup/https---github.com-webmproject-libvpx-,felipebetancur/libvpx,felipebetancur/libvpx,kalli123/webm.libvpx,smarter/aom,shyamalschandra/libvpx,zofuthan/libvpx,reimaginemedia/webm.libvpx,running770/libvpx,matanbs/vp982,Maria1099/webm.libvpx,pcwalton/libvpx,running770/libvpx,sanyaade-teachings/libvpx,shareefalis/libvpx,turbulenz/libvpx,Distrotech/libvpx,luctrudeau/aom,kim42083/webm.libvpx,gshORTON/webm.libvpx,mbebenita/aom,zofuthan/libvpx,liqianggao/libvpx,mwgoldsmith/libvpx,vasilvv/esvp8,sanyaade-teachings/libvpx,mbebenita/aom,luctrudeau/aom,shacklettbp/aom,Maria1099/webm.libvpx,stewnorriss/libvpx,matanbs/webm.libvpx,webmproject/libvpx,VTCSecureLLC/libvpx,kleopatra999/webm.libvpx,kalli123/webm.libvpx,openpeer/libvpx_new,shyamalschandra/libvpx,liqianggao/libvpx,cinema6/libvpx,liqianggao/libvpx,GrokImageCompression/aom,abwiz0086/webm.libvpx,thdav/aom,vasilvv/esvp8,Distrotech/libvpx,jmvalin/aom,felipebetancur/libvpx,VTCSecureLLC/libvpx,abwiz0086/webm.libvpx,kalli123/webm.libvpx,reimaginemedia/webm.libvpx,matanbs/vp982,VTCSecureLLC/libvpx,jdm/libvpx,gshORTON/webm.libvpx,lyx2014/libvpx_c,reimaginemedia/webm.libvpx,reimaginemedia/webm.libvpx,jdm/libvpx,zofuthan/libvpx,jacklicn/webm.libvpx,n4t/libvpx,thdav/aom,pcwalton/libvpx,pcwalton/libvpx,goodleixiao/vpx,gshORTON/webm.libvpx,shyamalschandra/libvpx,mwgoldsmith/vpx,turbulenz/libvpx,turbulenz/libvpx,shareefalis/libvpx,Suvarna1488/webm.libvpx,matanbs/webm.libvpx,Topopiccione/libvpx,gshORTON/webm.libvpx,running770/libvpx,turbulenz/libvpx,sanyaade-teachings/libvpx,goodleixiao/vpx,thdav/aom,kleopatra999/webm.libvpx,Distrotech/libvpx,pcwalton/libvpx,kim42083/webm.libvpx,turbulenz/libvpx,zofuthan/libvpx,shyamalschandra/libvpx,cinema6/libvpx,gshORTON/webm.libvpx,vasilvv/esvp8,mwgoldsmith/vpx,lyx2014/libvpx_c,vasilvv/esvp8,shareefalis/libvpx,shacklettbp/aom,kalli123/webm.libvpx,smarter/aom,shyamalschandra/libvpx,altogother/webm.libvpx,WebRTC-Labs/libvpx,jdm/libvpx,abwiz0086/webm.libvpx,Acidburn0zzz/webm.libvpx,turbulenz/libvpx,openpeer/libvpx_new,Topopiccione/libvpx,jacklicn/webm.libvpx,ShiftMediaProject/libvpx,matanbs/vp982,n4t/libvpx,Topopiccione/libvpx,Suvarna1488/webm.libvpx,pcwalton/libvpx,hsueceumd/test_hui,iniwf/webm.libvpx,openpeer/libvpx_new,jacklicn/webm.libvpx,webmproject/libvpx,cinema6/libvpx,mwgoldsmith/libvpx,felipebetancur/libvpx,Distrotech/libvpx,n4t/libvpx,stewnorriss/libvpx,n4t/libvpx,kim42083/webm.libvpx,ittiamvpx/libvpx-1,
Topopiccione/libvpx,matanbs/webm.libvpx,goodleixiao/vpx,lyx2014/libvpx_c,GrokImageCompression/aom,Distrotech/libvpx,kleopatra999/webm.libvpx,charup/https---github.com-webmproject-libvpx-,mwgoldsmith/vpx,charup/https---github.com-webmproject-libvpx-,jmvalin/aom,ittiamvpx/libvpx-1,hsueceumd/test_hui,mwgoldsmith/libvpx,iniwf/webm.libvpx,Acidburn0zzz/webm.libvpx,shyamalschandra/libvpx,Acidburn0zzz/webm.libvpx,cinema6/libvpx,Laknot/libvpx,felipebetancur/libvpx,mwgoldsmith/vpx,jacklicn/webm.libvpx,luctrudeau/aom,matanbs/vp982,mwgoldsmith/vpx,felipebetancur/libvpx,openpeer/libvpx_new,altogother/webm.libvpx,goodleixiao/vpx,Laknot/libvpx,stewnorriss/libvpx,ittiamvpx/libvpx-1,Topopiccione/libvpx,ittiamvpx/libvpx-1,iniwf/webm.libvpx,iniwf/webm.libvpx,abwiz0086/webm.libvpx,stewnorriss/libvpx,WebRTC-Labs/libvpx,jdm/libvpx,kleopatra999/webm.libvpx,ittiamvpx/libvpx,mwgoldsmith/libvpx,Acidburn0zzz/webm.libvpx,mwgoldsmith/vpx,reimaginemedia/webm.libvpx,iniwf/webm.libvpx,jmvalin/aom,turbulenz/libvpx,Maria1099/webm.libvpx,GrokImageCompression/aom,thdav/aom,jacklicn/webm.libvpx,thdav/aom,ittiamvpx/libvpx,Acidburn0zzz/webm.libvpx,running770/libvpx,altogother/webm.libvpx,running770/libvpx,cinema6/libvpx,n4t/libvpx,vasilvv/esvp8,cinema6/libvpx,luctrudeau/aom,sanyaade-teachings/libvpx,ittiamvpx/libvpx,Suvarna1488/webm.libvpx,ShiftMediaProject/libvpx,WebRTC-Labs/libvpx,GrokImageCompression/aom,matanbs/vp982,jdm/libvpx,kleopatra999/webm.libvpx,ittiamvpx/libvpx-1,smarter/aom,lyx2014/libvpx_c,kim42083/webm.libvpx,WebRTC-Labs/libvpx,shacklettbp/aom,VTCSecureLLC/libvpx,lyx2014/libvpx_c,matanbs/webm.libvpx,cinema6/libvpx,Maria1099/webm.libvpx,matanbs/webm.libvpx,Maria1099/webm.libvpx,stewnorriss/libvpx,charup/https---github.com-webmproject-libvpx-,kim42083/webm.libvpx,thdav/aom,Maria1099/webm.libvpx,iniwf/webm.libvpx,ShiftMediaProject/libvpx,ShiftMediaProject/libvpx,charup/https---github.com-webmproject-libvpx-,goodleixiao/vpx,smarter/aom,Laknot/libvpx,goodleixiao/vpx,shacklettbp/aom,webmproject/libvpx,pcwalton/libvpx,VTCSecureLLC/libvpx,abwiz0086/webm.libvpx,Acidburn0zzz/webm.libvpx,kalli123/webm.libvpx,zofuthan/libvpx,matanbs/webm.libvpx,kim42083/webm.libvpx,shacklettbp/aom,abwiz0086/webm.libvpx,jdm/libvpx,stewnorriss/libvpx,webmproject/libvpx,webmproject/libvpx,mbebenita/aom,GrokImageCompression/aom,vasilvv/esvp8,liqianggao/libvpx,lyx2014/libvpx_c
|
Add script to test all builds.
Change-Id: I6bbed8bcb2dfa3458ffc59179dfba66c92e18125
|
#!/usr/bin/python
import subprocess
import sys
def RunCommand(command):
run = subprocess.Popen(command, shell=True)
output = run.communicate()
if run.returncode:
print "Non-zero return code: " + str(run.returncode) + " => exiting!"
sys.exit(1)
def list_of_experiments():
experiments = []
configure_file = open("configure")
list_start = False
for line in configure_file.read().split("\n"):
if line == 'EXPERIMENT_LIST="':
list_start = True
elif line == '"':
list_start = False
elif list_start:
currently_broken = ["csm"]
experiment = line[4:]
if experiment not in currently_broken:
experiments.append(experiment)
return experiments
def main():
base_command = "./configure --enable-internal-stats"
test_build(base_command)
for experiment_name in list_of_experiments():
test_build("%s --enable-experimental --enable-%s" % (base_command,
experiment_name))
def test_build(configure_command):
print "\033[34m\033[47mTesting %s\033[0m" % (configure_command)
RunCommand(configure_command)
RunCommand("make clean")
RunCommand("make")
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to test all builds.
Change-Id: I6bbed8bcb2dfa3458ffc59179dfba66c92e18125<commit_after>
|
#!/usr/bin/python
import subprocess
import sys
def RunCommand(command):
run = subprocess.Popen(command, shell=True)
output = run.communicate()
if run.returncode:
print "Non-zero return code: " + str(run.returncode) + " => exiting!"
sys.exit(1)
def list_of_experiments():
experiments = []
configure_file = open("configure")
list_start = False
for line in configure_file.read().split("\n"):
if line == 'EXPERIMENT_LIST="':
list_start = True
elif line == '"':
list_start = False
elif list_start:
currently_broken = ["csm"]
experiment = line[4:]
if experiment not in currently_broken:
experiments.append(experiment)
return experiments
def main():
base_command = "./configure --enable-internal-stats"
test_build(base_command)
for experiment_name in list_of_experiments():
test_build("%s --enable-experimental --enable-%s" % (base_command,
experiment_name))
def test_build(configure_command):
print "\033[34m\033[47mTesting %s\033[0m" % (configure_command)
RunCommand(configure_command)
RunCommand("make clean")
RunCommand("make")
if __name__ == "__main__":
main()
|
Add script to test all builds.
Change-Id: I6bbed8bcb2dfa3458ffc59179dfba66c92e18125#!/usr/bin/python
import subprocess
import sys
def RunCommand(command):
run = subprocess.Popen(command, shell=True)
output = run.communicate()
if run.returncode:
print "Non-zero return code: " + str(run.returncode) + " => exiting!"
sys.exit(1)
def list_of_experiments():
experiments = []
configure_file = open("configure")
list_start = False
for line in configure_file.read().split("\n"):
if line == 'EXPERIMENT_LIST="':
list_start = True
elif line == '"':
list_start = False
elif list_start:
currently_broken = ["csm"]
experiment = line[4:]
if experiment not in currently_broken:
experiments.append(experiment)
return experiments
def main():
base_command = "./configure --enable-internal-stats"
test_build(base_command)
for experiment_name in list_of_experiments():
test_build("%s --enable-experimental --enable-%s" % (base_command,
experiment_name))
def test_build(configure_command):
print "\033[34m\033[47mTesting %s\033[0m" % (configure_command)
RunCommand(configure_command)
RunCommand("make clean")
RunCommand("make")
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to test all builds.
Change-Id: I6bbed8bcb2dfa3458ffc59179dfba66c92e18125<commit_after>#!/usr/bin/python
import subprocess
import sys
def RunCommand(command):
run = subprocess.Popen(command, shell=True)
output = run.communicate()
if run.returncode:
print "Non-zero return code: " + str(run.returncode) + " => exiting!"
sys.exit(1)
def list_of_experiments():
experiments = []
configure_file = open("configure")
list_start = False
for line in configure_file.read().split("\n"):
if line == 'EXPERIMENT_LIST="':
list_start = True
elif line == '"':
list_start = False
elif list_start:
currently_broken = ["csm"]
experiment = line[4:]
if experiment not in currently_broken:
experiments.append(experiment)
return experiments
def main():
base_command = "./configure --enable-internal-stats"
test_build(base_command)
for experiment_name in list_of_experiments():
test_build("%s --enable-experimental --enable-%s" % (base_command,
experiment_name))
def test_build(configure_command):
print "\033[34m\033[47mTesting %s\033[0m" % (configure_command)
RunCommand(configure_command)
RunCommand("make clean")
RunCommand("make")
if __name__ == "__main__":
main()
|
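Editor's note: RunCommand above re-implements run-or-die semantics that the standard library also provides; a shorter behavioural sketch (raising CalledProcessError instead of calling sys.exit) is:
import subprocess
# Raises subprocess.CalledProcessError on a non-zero exit status, aborting the script.
subprocess.check_call("./configure --enable-internal-stats", shell=True)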
|
12311fff64dba06b24c66dd0523e52f3cd8927b9
|
lib/bridgedb/test/test_Bucket.py
|
lib/bridgedb/test/test_Bucket.py
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
import unittest
from StringIO import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
alloc = 10
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(bucket.needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
alloc = '*'
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
        this(bucket.needed).should.be.equal(1000000)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
        exec compiled in configuration
        self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
Add a test for Buckets
|
Add a test for Buckets
The others will require mocking a DB connection.
|
Python
|
bsd-3-clause
|
pagea/bridgedb,pagea/bridgedb
|
Add a test for Buckets
The others will require mocking a DB connection.
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
import unittest
from StringIO import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
alloc = 10
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(bucket.needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
alloc = '*'
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
        this(bucket.needed).should.be.equal(1000000)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
        exec compiled in configuration
        self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
<commit_before><commit_msg>Add a test for Buckets
The others will require mocking a DB connection.<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
import unittest
from io import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
alloc = 10
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(bucket.needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
alloc = '*'
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
        this(bucket.needed).should.be.equal(1000000)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        # re-read the in-memory config file and execute it into a namespace
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
        exec compiled in configuration
        self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
Add a test for Buckets
The others will require mocking a DB connection.# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
import unittest
from io import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
alloc = 10
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(bucket.needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
alloc = '*'
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
        this(bucket.needed).should.be.equal(1000000)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        # re-read the in-memory config file and execute it into a namespace
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
        exec compiled in configuration
        self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
<commit_before><commit_msg>Add a test for Buckets
The others will require mocking a DB connection.<commit_after># -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: 3-Clause BSD, see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.Bucket` module.
These tests are meant to ensure that the :mod:`bridgedb.Bucket` module is
functioning as expected.
"""
from __future__ import print_function
import unittest
from io import StringIO
import sure
from sure import this
from sure import the
from sure import expect
from bridgedb import Bucket
from bridgedb import persistent
class BucketDataTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketData`."""
def test_alloc_some_of_the_bridges(self):
"""Set the needed number of bridges"""
alloc = 10
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
this(alloc).should.be.equal(bucket.needed)
def test_alloc_all_the_bridges(self):
"""Set the needed number of bridges to the default"""
alloc = '*'
distname = "test-distributor"
bucket = Bucket.BucketData(distname, alloc)
this(distname).should.be.equal(bucket.name)
        this(bucket.needed).should.be.equal(1000000)
class BucketManagerTest(unittest.TestCase):
"""Tests for :class:`bridgedb.Bucket.BucketManager`."""
TEST_CONFIG_FILE = StringIO(unicode("""\
FILE_BUCKETS = { 'test1': 7, 'test2': 11 }
COLLECT_TIMESTAMPS = False
COUNTRY_BLOCK_FILE = []"""))
def setUp(self):
configuration = {}
        # re-read the in-memory config file and execute it into a namespace
        self.TEST_CONFIG_FILE.seek(0)
        compiled = compile(self.TEST_CONFIG_FILE.read(), '<string>', 'exec')
        exec compiled in configuration
        self.config = persistent.Conf(**configuration)
        self.state = persistent.State(**self.config.__dict__)
self.bucket = Bucket.BucketManager(self.config)
|
|
3eb614d8c0edd1d52509c1ec7b48bba983600c35
|
examples/uart/txmod_test.py
|
examples/uart/txmod_test.py
|
import fault
import magma
from txmod import TXMOD
import random
def get_random(port):
if isinstance(port, magma.BitType):
return random.choice((0, 1))
N = type(port).N
return random.randint(0, 2 ** N - 1)
if __name__ == "__main__":
random.seed(0)
#TXMOD_v = magma.DefineFromVerilogFile("examples/uart/txmod.v")[0]
circ = TXMOD
tester = fault.Tester(circ, circ.CLK)
magma.compile("build/TXMOD", circ, output="coreir-verilog")
inputs = {}
outputs = {}
for name, port in circ.interface.ports.items():
if port is circ.CLK:
continue
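        # magma exposes interface ports from the definition's point of view,
        # so ports reporting isoutput() here are the DUT's inputs (the ones
        # we poke) and isinput() ports are its observable outputs.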
if port.isoutput():
inputs[name] = port
elif port.isinput():
outputs[name] = port
tester.poke(circ.CLK, 0)
for i in range(2):
#tester.print_string("=========================================")
tester.print_string("========== inputs ====================")
for name, port in inputs.items():
tester.poke(port, get_random(port))
tester.print_format(f"{name}: %p\\n", port)
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.step()
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.print_string("========== outputs ====================")
for name, port in outputs.items():
tester.print_format(f"{name}: %p\\n", port)
tester.compile_and_run(target="verilator", flags=["-Wno-fatal"], skip_compile=True)
|
Add test for uart magma impl
|
Add test for uart magma impl
|
Python
|
mit
|
phanrahan/magmathon,phanrahan/magmathon
|
Add test for uart magma impl
|
import fault
import magma
from txmod import TXMOD
import random
def get_random(port):
if isinstance(port, magma.BitType):
return random.choice((0, 1))
N = type(port).N
return random.randint(0, 2 ** N - 1)
if __name__ == "__main__":
random.seed(0)
#TXMOD_v = magma.DefineFromVerilogFile("examples/uart/txmod.v")[0]
circ = TXMOD
tester = fault.Tester(circ, circ.CLK)
magma.compile("build/TXMOD", circ, output="coreir-verilog")
inputs = {}
outputs = {}
for name, port in circ.interface.ports.items():
if port is circ.CLK:
continue
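        # magma exposes interface ports from the definition's point of view,
        # so ports reporting isoutput() here are the DUT's inputs (the ones
        # we poke) and isinput() ports are its observable outputs.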
if port.isoutput():
inputs[name] = port
elif port.isinput():
outputs[name] = port
tester.poke(circ.CLK, 0)
for i in range(2):
#tester.print_string("=========================================")
tester.print_string("========== inputs ====================")
for name, port in inputs.items():
tester.poke(port, get_random(port))
tester.print_format(f"{name}: %p\\n", port)
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.step()
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.print_string("========== outputs ====================")
for name, port in outputs.items():
tester.print_format(f"{name}: %p\\n", port)
tester.compile_and_run(target="verilator", flags=["-Wno-fatal"], skip_compile=True)
|
<commit_before><commit_msg>Add test for uart magma impl<commit_after>
|
import fault
import magma
from txmod import TXMOD
import random
def get_random(port):
if isinstance(port, magma.BitType):
return random.choice((0, 1))
N = type(port).N
return random.randint(0, 2 ** N - 1)
if __name__ == "__main__":
random.seed(0)
#TXMOD_v = magma.DefineFromVerilogFile("examples/uart/txmod.v")[0]
circ = TXMOD
tester = fault.Tester(circ, circ.CLK)
magma.compile("build/TXMOD", circ, output="coreir-verilog")
inputs = {}
outputs = {}
for name, port in circ.interface.ports.items():
if port is circ.CLK:
continue
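        # magma exposes interface ports from the definition's point of view,
        # so ports reporting isoutput() here are the DUT's inputs (the ones
        # we poke) and isinput() ports are its observable outputs.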
if port.isoutput():
inputs[name] = port
elif port.isinput():
outputs[name] = port
tester.poke(circ.CLK, 0)
for i in range(2):
#tester.print_string("=========================================")
tester.print_string("========== inputs ====================")
for name, port in inputs.items():
tester.poke(port, get_random(port))
tester.print_format(f"{name}: %p\\n", port)
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.step()
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.print_string("========== outputs ====================")
for name, port in outputs.items():
tester.print_format(f"{name}: %p\\n", port)
tester.compile_and_run(target="verilator", flags=["-Wno-fatal"], skip_compile=True)
|
Add test for uart magma implimport fault
import magma
from txmod import TXMOD
import random
def get_random(port):
if isinstance(port, magma.BitType):
return random.choice((0, 1))
N = type(port).N
return random.randint(0, 2 ** N - 1)
if __name__ == "__main__":
random.seed(0)
#TXMOD_v = magma.DefineFromVerilogFile("examples/uart/txmod.v")[0]
circ = TXMOD
tester = fault.Tester(circ, circ.CLK)
magma.compile("build/TXMOD", circ, output="coreir-verilog")
inputs = {}
outputs = {}
for name, port in circ.interface.ports.items():
if port is circ.CLK:
continue
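        # magma exposes interface ports from the definition's point of view,
        # so ports reporting isoutput() here are the DUT's inputs (the ones
        # we poke) and isinput() ports are its observable outputs.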
if port.isoutput():
inputs[name] = port
elif port.isinput():
outputs[name] = port
tester.poke(circ.CLK, 0)
for i in range(2):
#tester.print_string("=========================================")
tester.print_string("========== inputs ====================")
for name, port in inputs.items():
tester.poke(port, get_random(port))
tester.print_format(f"{name}: %p\\n", port)
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.step()
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.print_string("========== outputs ====================")
for name, port in outputs.items():
tester.print_format(f"{name}: %p\\n", port)
tester.compile_and_run(target="verilator", flags=["-Wno-fatal"], skip_compile=True)
|
<commit_before><commit_msg>Add test for uart magma impl<commit_after>import fault
import magma
from txmod import TXMOD
import random
def get_random(port):
if isinstance(port, magma.BitType):
return random.choice((0, 1))
N = type(port).N
return random.randint(0, 2 ** N - 1)
if __name__ == "__main__":
random.seed(0)
#TXMOD_v = magma.DefineFromVerilogFile("examples/uart/txmod.v")[0]
circ = TXMOD
tester = fault.Tester(circ, circ.CLK)
magma.compile("build/TXMOD", circ, output="coreir-verilog")
inputs = {}
outputs = {}
for name, port in circ.interface.ports.items():
if port is circ.CLK:
continue
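        # magma exposes interface ports from the definition's point of view,
        # so ports reporting isoutput() here are the DUT's inputs (the ones
        # we poke) and isinput() ports are its observable outputs.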
if port.isoutput():
inputs[name] = port
elif port.isinput():
outputs[name] = port
tester.poke(circ.CLK, 0)
for i in range(2):
#tester.print_string("=========================================")
tester.print_string("========== inputs ====================")
for name, port in inputs.items():
tester.poke(port, get_random(port))
tester.print_format(f"{name}: %p\\n", port)
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.step()
tester.print_format(f"CLK: %p\\n", circ.CLK)
tester.print_string("========== outputs ====================")
for name, port in outputs.items():
tester.print_format(f"{name}: %p\\n", port)
tester.compile_and_run(target="verilator", flags=["-Wno-fatal"], skip_compile=True)
|
|
0a33ea9c108693e94405dbd964b9361bb2ae7e2a
|
hyperiontests/test_kibana.py
|
hyperiontests/test_kibana.py
|
# Copyright (C) 2014 Nicolas Lamirault <nicolas.lamirault@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hyperiontests import hyperion
class TestKibana(hyperion.HyperionTestCase):
def test_can_retrieve_logstash_dashboard(self):
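        # the '#/dashboard/file/logstash.json' part is a client-side URL
        # fragment; the HTTP request itself only fetches kibana/index.html,
        # so this effectively checks that the Kibana UI is being served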
response = self.http_get("kibana/index.html#/dashboard/file/logstash.json")
self.assertEqual(200, response.status_code)
|
Add unit tests for Kibana
|
Add unit tests for Kibana
|
Python
|
apache-2.0
|
portefaix/hyperion-k8s,portefaix/hyperion,nlamirault/hyperion
|
Add unit tests for Kibana
|
# Copyright (C) 2014 Nicolas Lamirault <nicolas.lamirault@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hyperiontests import hyperion
class TestKibana(hyperion.HyperionTestCase):
def test_can_retrieve_logstash_dashboard(self):
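        # the '#/dashboard/file/logstash.json' part is a client-side URL
        # fragment; the HTTP request itself only fetches kibana/index.html,
        # so this effectively checks that the Kibana UI is being served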
response = self.http_get("kibana/index.html#/dashboard/file/logstash.json")
self.assertEqual(200, response.status_code)
|
<commit_before><commit_msg>Add unit tests for Kibana<commit_after>
|
# Copyright (C) 2014 Nicolas Lamirault <nicolas.lamirault@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hyperiontests import hyperion
class TestKibana(hyperion.HyperionTestCase):
def test_can_retrieve_logstash_dashboard(self):
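        # the '#/dashboard/file/logstash.json' part is a client-side URL
        # fragment; the HTTP request itself only fetches kibana/index.html,
        # so this effectively checks that the Kibana UI is being served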
response = self.http_get("kibana/index.html#/dashboard/file/logstash.json")
self.assertEqual(200, response.status_code)
|
Add unit tests for Kibana# Copyright (C) 2014 Nicolas Lamirault <nicolas.lamirault@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hyperiontests import hyperion
class TestKibana(hyperion.HyperionTestCase):
def test_can_retrieve_logstash_dashboard(self):
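        # the '#/dashboard/file/logstash.json' part is a client-side URL
        # fragment; the HTTP request itself only fetches kibana/index.html,
        # so this effectively checks that the Kibana UI is being served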
response = self.http_get("kibana/index.html#/dashboard/file/logstash.json")
self.assertEqual(200, response.status_code)
|
<commit_before><commit_msg>Add unit tests for Kibana<commit_after># Copyright (C) 2014 Nicolas Lamirault <nicolas.lamirault@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hyperiontests import hyperion
class TestKibana(hyperion.HyperionTestCase):
def test_can_retrieve_logstash_dashboard(self):
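        # the '#/dashboard/file/logstash.json' part is a client-side URL
        # fragment; the HTTP request itself only fetches kibana/index.html,
        # so this effectively checks that the Kibana UI is being served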
response = self.http_get("kibana/index.html#/dashboard/file/logstash.json")
self.assertEqual(200, response.status_code)
|
|
2b3fcfbd9b497d3a162cb36af4eb28e619648258
|
project/commands/create_db.py
|
project/commands/create_db.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import current_app as app
from flask_script import Command
from project.database import init_db, drop_db
class CreateDBCommand(Command):
"""
    Drops and recreates all tables from registered models
"""
def run(self, **kwargs):
app.logger.info("Running {} with arguments {}".format(self.__class__.__name__, kwargs))
        self.__dict__.update(**kwargs)  # merge keyword arguments into the command's attributes
drop_db(app)
init_db(app)
|
Add command to create new database with all tables
|
Add command to create new database with all tables
|
Python
|
mit
|
andreffs18/flask-template-project,andreffs18/flask-template-project,andreffs18/flask-template-project
|
Add command to create new database with all tables
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import current_app as app
from flask_script import Command
from project.database import init_db, drop_db
class CreateDBCommand(Command):
"""
    Drops and recreates all tables from registered models
"""
def run(self, **kwargs):
app.logger.info("Running {} with arguments {}".format(self.__class__.__name__, kwargs))
        self.__dict__.update(**kwargs)  # merge keyword arguments into the command's attributes
drop_db(app)
init_db(app)
|
<commit_before><commit_msg>Add command to create new database with all tables<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import current_app as app
from flask_script import Command
from project.database import init_db, drop_db
class CreateDBCommand(Command):
"""
    Drops and recreates all tables from registered models
"""
def run(self, **kwargs):
app.logger.info("Running {} with arguments {}".format(self.__class__.__name__, kwargs))
        self.__dict__.update(**kwargs)  # merge keyword arguments into the command's attributes
drop_db(app)
init_db(app)
|
Add command to create new database with all tables#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import current_app as app
from flask_script import Command
from project.database import init_db, drop_db
class CreateDBCommand(Command):
"""
    Drops and recreates all tables from registered models
"""
def run(self, **kwargs):
app.logger.info("Running {} with arguments {}".format(self.__class__.__name__, kwargs))
        self.__dict__.update(**kwargs)  # merge keyword arguments into the command's attributes
drop_db(app)
init_db(app)
|
<commit_before><commit_msg>Add command to create new database with all tables<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import current_app as app
from flask_script import Command
from project.database import init_db, drop_db
class CreateDBCommand(Command):
"""
    Drops and recreates all tables from registered models
"""
def run(self, **kwargs):
app.logger.info("Running {} with arguments {}".format(self.__class__.__name__, kwargs))
        self.__dict__.update(**kwargs)  # merge keyword arguments into the command's attributes
drop_db(app)
init_db(app)
|
|
a2400b6980089803b38121e20e2d24ee2f463eb1
|
keyring/tests/backends/test_chainer.py
|
keyring/tests/backends/test_chainer.py
|
import pytest
import keyring.backends.chainer
from keyring import backend
@pytest.fixture
def two_keyrings(monkeypatch):
def get_two():
class Keyring1(backend.KeyringBackend):
priority = 1
def get_password(self, system, user):
return 'ring1-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
pass
class Keyring2(backend.KeyringBackend):
priority = 2
def get_password(self, system, user):
return 'ring2-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
raise NotImplementedError()
return Keyring1(), Keyring2()
monkeypatch.setattr('keyring.backend.get_all_keyring', get_two)
class TestChainer:
def test_chainer_gets_from_highest_priority(self, two_keyrings):
chainer = keyring.backends.chainer.ChainerBackend()
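        # backends are consulted in priority order, so the higher-priority
        # Keyring2 (priority=2) is expected to supply the password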
pw = chainer.get_password('alpha', 'bravo')
assert pw == 'ring2-alpha-bravo'
|
Add a test for the chainer.
|
Add a test for the chainer.
|
Python
|
mit
|
jaraco/keyring
|
Add a test for the chainer.
|
import pytest
import keyring.backends.chainer
from keyring import backend
@pytest.fixture
def two_keyrings(monkeypatch):
def get_two():
class Keyring1(backend.KeyringBackend):
priority = 1
def get_password(self, system, user):
return 'ring1-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
pass
class Keyring2(backend.KeyringBackend):
priority = 2
def get_password(self, system, user):
return 'ring2-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
raise NotImplementedError()
return Keyring1(), Keyring2()
monkeypatch.setattr('keyring.backend.get_all_keyring', get_two)
class TestChainer:
def test_chainer_gets_from_highest_priority(self, two_keyrings):
chainer = keyring.backends.chainer.ChainerBackend()
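        # backends are consulted in priority order, so the higher-priority
        # Keyring2 (priority=2) is expected to supply the password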
pw = chainer.get_password('alpha', 'bravo')
assert pw == 'ring2-alpha-bravo'
|
<commit_before><commit_msg>Add a test for the chainer.<commit_after>
|
import pytest
import keyring.backends.chainer
from keyring import backend
@pytest.fixture
def two_keyrings(monkeypatch):
def get_two():
class Keyring1(backend.KeyringBackend):
priority = 1
def get_password(self, system, user):
return 'ring1-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
pass
class Keyring2(backend.KeyringBackend):
priority = 2
def get_password(self, system, user):
return 'ring2-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
raise NotImplementedError()
return Keyring1(), Keyring2()
monkeypatch.setattr('keyring.backend.get_all_keyring', get_two)
class TestChainer:
def test_chainer_gets_from_highest_priority(self, two_keyrings):
chainer = keyring.backends.chainer.ChainerBackend()
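        # backends are consulted in priority order, so the higher-priority
        # Keyring2 (priority=2) is expected to supply the password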
pw = chainer.get_password('alpha', 'bravo')
assert pw == 'ring2-alpha-bravo'
|
Add a test for the chainer.import pytest
import keyring.backends.chainer
from keyring import backend
@pytest.fixture
def two_keyrings(monkeypatch):
def get_two():
class Keyring1(backend.KeyringBackend):
priority = 1
def get_password(self, system, user):
return 'ring1-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
pass
class Keyring2(backend.KeyringBackend):
priority = 2
def get_password(self, system, user):
return 'ring2-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
raise NotImplementedError()
return Keyring1(), Keyring2()
monkeypatch.setattr('keyring.backend.get_all_keyring', get_two)
class TestChainer:
def test_chainer_gets_from_highest_priority(self, two_keyrings):
chainer = keyring.backends.chainer.ChainerBackend()
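        # backends are consulted in priority order, so the higher-priority
        # Keyring2 (priority=2) is expected to supply the password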
pw = chainer.get_password('alpha', 'bravo')
assert pw == 'ring2-alpha-bravo'
|
<commit_before><commit_msg>Add a test for the chainer.<commit_after>import pytest
import keyring.backends.chainer
from keyring import backend
@pytest.fixture
def two_keyrings(monkeypatch):
def get_two():
class Keyring1(backend.KeyringBackend):
priority = 1
def get_password(self, system, user):
return 'ring1-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
pass
class Keyring2(backend.KeyringBackend):
priority = 2
def get_password(self, system, user):
return 'ring2-{system}-{user}'.format(**locals())
def set_password(self, system, user, password):
raise NotImplementedError()
return Keyring1(), Keyring2()
monkeypatch.setattr('keyring.backend.get_all_keyring', get_two)
class TestChainer:
def test_chainer_gets_from_highest_priority(self, two_keyrings):
chainer = keyring.backends.chainer.ChainerBackend()
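        # backends are consulted in priority order, so the higher-priority
        # Keyring2 (priority=2) is expected to supply the password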
pw = chainer.get_password('alpha', 'bravo')
assert pw == 'ring2-alpha-bravo'
|
|
000266aed1f4c1106c791105427a7238add90a01
|
nodeconductor/core/management/commands/removestalect.py
|
nodeconductor/core/management/commands/removestalect.py
|
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from nodeconductor.cost_tracking import models as cost_tracking_models
class Command(BaseCommand):
help = "Remove instances that have FK to stale content types."
def handle(self, *args, **options):
for estimate in cost_tracking_models.PriceEstimate.objects.all():
if estimate.content_type.model_class() is None:
estimate.delete()
for entry in LogEntry.objects.all():
if entry.content_type.model_class() is None:
entry.delete()
|
Add command that deletes stale content types
|
Add command that deletes stale content types
- nc-1511
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Add command that deletes stale content types
- nc-1511
|
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from nodeconductor.cost_tracking import models as cost_tracking_models
class Command(BaseCommand):
help = "Remove instances that have FK to stale content types."
def handle(self, *args, **options):
for estimate in cost_tracking_models.PriceEstimate.objects.all():
if estimate.content_type.model_class() is None:
estimate.delete()
for entry in LogEntry.objects.all():
if entry.content_type.model_class() is None:
entry.delete()
|
<commit_before><commit_msg>Add command that deletes stale content types
- nc-1511<commit_after>
|
from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from nodeconductor.cost_tracking import models as cost_tracking_models
class Command(BaseCommand):
help = "Remove instances that have FK to stale content types."
def handle(self, *args, **options):
for estimate in cost_tracking_models.PriceEstimate.objects.all():
if estimate.content_type.model_class() is None:
estimate.delete()
for entry in LogEntry.objects.all():
if entry.content_type.model_class() is None:
entry.delete()
|
Add command that deletes stale content types
- nc-1511from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from nodeconductor.cost_tracking import models as cost_tracking_models
class Command(BaseCommand):
help = "Remove instances that have FK to stale content types."
def handle(self, *args, **options):
for estimate in cost_tracking_models.PriceEstimate.objects.all():
if estimate.content_type.model_class() is None:
estimate.delete()
for entry in LogEntry.objects.all():
if entry.content_type.model_class() is None:
entry.delete()
|
<commit_before><commit_msg>Add command that deletes stale content types
- nc-1511<commit_after>from django.contrib.admin.models import LogEntry
from django.core.management.base import BaseCommand
from nodeconductor.cost_tracking import models as cost_tracking_models
class Command(BaseCommand):
help = "Remove instances that have FK to stale content types."
def handle(self, *args, **options):
for estimate in cost_tracking_models.PriceEstimate.objects.all():
if estimate.content_type.model_class() is None:
estimate.delete()
for entry in LogEntry.objects.all():
if entry.content_type.model_class() is None:
entry.delete()
|
|
5c0c8451f3975ae98be33e0f47dde73cb5a2e3ac
|
nodeconductor/structure/tests/unittests/test_filters.py
|
nodeconductor/structure/tests/unittests/test_filters.py
|
import mock
from django.test import TestCase
from nodeconductor.logging import models as logging_models
from nodeconductor.logging.tests import factories as logging_factories
from nodeconductor.structure.filters import AggregateFilter
from nodeconductor.structure.tests import factories
class AggregateFilterTest(TestCase):
def setUp(self):
self.customer = factories.CustomerFactory()
self.project = factories.ProjectFactory()
self.sut = AggregateFilter()
self.queryset = logging_models.Alert.objects
def test_service_alert_is_included_when_customer_is_the_same(self):
scope = factories.TestServiceFactory(customer=self.customer)
alert = logging_factories.AlertFactory(scope=scope)
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), 1)
self.assertTrue(result.filter(uuid=alert.uuid).exists())
def test_project_alert_is_not_included_when_it_belongs_to_another_customer(self):
alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertFalse(result.filter(uuid=alert.uuid).exists())
def test_only_customer_related_scopes_are_returned(self):
customer_related_alerts = []
invalid_alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
spl = factories.TestServiceProjectLinkFactory(service__customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=spl))
service = factories.TestServiceFactory(customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=service))
customer_related_alerts_ids = [a.uuid for a in customer_related_alerts]
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), len(customer_related_alerts))
self.assertEqual(result.filter(uuid__in=customer_related_alerts_ids).count(), len(customer_related_alerts_ids))
self.assertFalse(result.filter(uuid=invalid_alert.uuid).exists())
def test_service_project_link_alert_is_not_returned_when_it_is_related_to_another_project(self):
not_owned_alert = logging_factories.AlertFactory(scope=factories.TestServiceProjectLinkFactory())
spl = factories.TestServiceProjectLinkFactory(project=self.project)
owned_alert = logging_factories.AlertFactory(scope=spl)
result = self._make_aggregate_request('project', self.project.uuid.hex)
self.assertTrue(result.filter(uuid=owned_alert.uuid).exists())
self.assertFalse(result.filter(uuid=not_owned_alert.uuid).exists())
def _make_aggregate_request(self, aggregate_by, uuid):
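        # build a minimal mock of a DRF-style request carrying the
        # ``aggregate`` and ``uuid`` query parameters the filter reads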
request = mock.Mock()
request.query_params = {
'aggregate': aggregate_by,
'uuid': uuid,
}
return self.sut.filter(request, self.queryset, None)
|
Cover AggregateFilter with unit tests
|
Cover AggregateFilter with unit tests [WAL-397]
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Cover AggregateFilter with unit tests [WAL-397]
|
import mock
from django.test import TestCase
from nodeconductor.logging import models as logging_models
from nodeconductor.logging.tests import factories as logging_factories
from nodeconductor.structure.filters import AggregateFilter
from nodeconductor.structure.tests import factories
class AggregateFilterTest(TestCase):
def setUp(self):
self.customer = factories.CustomerFactory()
self.project = factories.ProjectFactory()
self.sut = AggregateFilter()
self.queryset = logging_models.Alert.objects
def test_service_alert_is_included_when_customer_is_the_same(self):
scope = factories.TestServiceFactory(customer=self.customer)
alert = logging_factories.AlertFactory(scope=scope)
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), 1)
self.assertTrue(result.filter(uuid=alert.uuid).exists())
def test_project_alert_is_not_included_when_it_belongs_to_another_customer(self):
alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertFalse(result.filter(uuid=alert.uuid).exists())
def test_only_customer_related_scopes_are_returned(self):
customer_related_alerts = []
invalid_alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
spl = factories.TestServiceProjectLinkFactory(service__customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=spl))
service = factories.TestServiceFactory(customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=service))
customer_related_alerts_ids = [a.uuid for a in customer_related_alerts]
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), len(customer_related_alerts))
self.assertEqual(result.filter(uuid__in=customer_related_alerts_ids).count(), len(customer_related_alerts_ids))
self.assertFalse(result.filter(uuid=invalid_alert.uuid).exists())
def test_service_project_link_alert_is_not_returned_when_it_is_related_to_another_project(self):
not_owned_alert = logging_factories.AlertFactory(scope=factories.TestServiceProjectLinkFactory())
spl = factories.TestServiceProjectLinkFactory(project=self.project)
owned_alert = logging_factories.AlertFactory(scope=spl)
result = self._make_aggregate_request('project', self.project.uuid.hex)
self.assertTrue(result.filter(uuid=owned_alert.uuid).exists())
self.assertFalse(result.filter(uuid=not_owned_alert.uuid).exists())
def _make_aggregate_request(self, aggregate_by, uuid):
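        # build a minimal mock of a DRF-style request carrying the
        # ``aggregate`` and ``uuid`` query parameters the filter reads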
request = mock.Mock()
request.query_params = {
'aggregate': aggregate_by,
'uuid': uuid,
}
return self.sut.filter(request, self.queryset, None)
|
<commit_before><commit_msg>Cover AggregateFilter with unit tests [WAL-397]<commit_after>
|
import mock
from django.test import TestCase
from nodeconductor.logging import models as logging_models
from nodeconductor.logging.tests import factories as logging_factories
from nodeconductor.structure.filters import AggregateFilter
from nodeconductor.structure.tests import factories
class AggregateFilterTest(TestCase):
def setUp(self):
self.customer = factories.CustomerFactory()
self.project = factories.ProjectFactory()
self.sut = AggregateFilter()
self.queryset = logging_models.Alert.objects
def test_service_alert_is_included_when_customer_is_the_same(self):
scope = factories.TestServiceFactory(customer=self.customer)
alert = logging_factories.AlertFactory(scope=scope)
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), 1)
self.assertTrue(result.filter(uuid=alert.uuid).exists())
def test_project_alert_is_not_included_when_it_belongs_to_another_customer(self):
alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertFalse(result.filter(uuid=alert.uuid).exists())
def test_only_customer_related_scopes_are_returned(self):
customer_related_alerts = []
invalid_alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
spl = factories.TestServiceProjectLinkFactory(service__customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=spl))
service = factories.TestServiceFactory(customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=service))
customer_related_alerts_ids = [a.uuid for a in customer_related_alerts]
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), len(customer_related_alerts))
self.assertEqual(result.filter(uuid__in=customer_related_alerts_ids).count(), len(customer_related_alerts_ids))
self.assertFalse(result.filter(uuid=invalid_alert.uuid).exists())
def test_service_project_link_alert_is_not_returned_when_it_is_related_to_another_project(self):
not_owned_alert = logging_factories.AlertFactory(scope=factories.TestServiceProjectLinkFactory())
spl = factories.TestServiceProjectLinkFactory(project=self.project)
owned_alert = logging_factories.AlertFactory(scope=spl)
result = self._make_aggregate_request('project', self.project.uuid.hex)
self.assertTrue(result.filter(uuid=owned_alert.uuid).exists())
self.assertFalse(result.filter(uuid=not_owned_alert.uuid).exists())
def _make_aggregate_request(self, aggregate_by, uuid):
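        # build a minimal mock of a DRF-style request carrying the
        # ``aggregate`` and ``uuid`` query parameters the filter reads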
request = mock.Mock()
request.query_params = {
'aggregate': aggregate_by,
'uuid': uuid,
}
return self.sut.filter(request, self.queryset, None)
|
Cover AggregateFilter with unit tests [WAL-397]import mock
from django.test import TestCase
from nodeconductor.logging import models as logging_models
from nodeconductor.logging.tests import factories as logging_factories
from nodeconductor.structure.filters import AggregateFilter
from nodeconductor.structure.tests import factories
class AggregateFilterTest(TestCase):
def setUp(self):
self.customer = factories.CustomerFactory()
self.project = factories.ProjectFactory()
self.sut = AggregateFilter()
self.queryset = logging_models.Alert.objects
def test_service_alert_is_included_when_customer_is_the_same(self):
scope = factories.TestServiceFactory(customer=self.customer)
alert = logging_factories.AlertFactory(scope=scope)
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), 1)
self.assertTrue(result.filter(uuid=alert.uuid).exists())
def test_project_alert_is_not_included_when_it_belongs_to_another_customer(self):
alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertFalse(result.filter(uuid=alert.uuid).exists())
def test_only_customer_related_scopes_are_returned(self):
customer_related_alerts = []
invalid_alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
spl = factories.TestServiceProjectLinkFactory(service__customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=spl))
service = factories.TestServiceFactory(customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=service))
customer_related_alerts_ids = [a.uuid for a in customer_related_alerts]
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), len(customer_related_alerts))
self.assertEqual(result.filter(uuid__in=customer_related_alerts_ids).count(), len(customer_related_alerts_ids))
self.assertFalse(result.filter(uuid=invalid_alert.uuid).exists())
def test_service_project_link_alert_is_not_returned_when_it_is_related_to_another_project(self):
not_owned_alert = logging_factories.AlertFactory(scope=factories.TestServiceProjectLinkFactory())
spl = factories.TestServiceProjectLinkFactory(project=self.project)
owned_alert = logging_factories.AlertFactory(scope=spl)
result = self._make_aggregate_request('project', self.project.uuid.hex)
self.assertTrue(result.filter(uuid=owned_alert.uuid).exists())
self.assertFalse(result.filter(uuid=not_owned_alert.uuid).exists())
def _make_aggregate_request(self, aggregate_by, uuid):
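        # build a minimal mock of a DRF-style request carrying the
        # ``aggregate`` and ``uuid`` query parameters the filter reads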
request = mock.Mock()
request.query_params = {
'aggregate': aggregate_by,
'uuid': uuid,
}
return self.sut.filter(request, self.queryset, None)
|
<commit_before><commit_msg>Cover AggregateFilter with unit tests [WAL-397]<commit_after>import mock
from django.test import TestCase
from nodeconductor.logging import models as logging_models
from nodeconductor.logging.tests import factories as logging_factories
from nodeconductor.structure.filters import AggregateFilter
from nodeconductor.structure.tests import factories
class AggregateFilterTest(TestCase):
def setUp(self):
self.customer = factories.CustomerFactory()
self.project = factories.ProjectFactory()
self.sut = AggregateFilter()
self.queryset = logging_models.Alert.objects
def test_service_alert_is_included_when_customer_is_the_same(self):
scope = factories.TestServiceFactory(customer=self.customer)
alert = logging_factories.AlertFactory(scope=scope)
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), 1)
self.assertTrue(result.filter(uuid=alert.uuid).exists())
def test_project_alert_is_not_included_when_it_belongs_to_another_customer(self):
alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertFalse(result.filter(uuid=alert.uuid).exists())
def test_only_customer_related_scopes_are_returned(self):
customer_related_alerts = []
invalid_alert = logging_factories.AlertFactory(scope=factories.ProjectFactory())
spl = factories.TestServiceProjectLinkFactory(service__customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=spl))
service = factories.TestServiceFactory(customer=self.customer)
customer_related_alerts.append(logging_factories.AlertFactory(scope=service))
customer_related_alerts_ids = [a.uuid for a in customer_related_alerts]
result = self._make_aggregate_request('customer', self.customer.uuid.hex)
self.assertEqual(len(result), len(customer_related_alerts))
self.assertEqual(result.filter(uuid__in=customer_related_alerts_ids).count(), len(customer_related_alerts_ids))
self.assertFalse(result.filter(uuid=invalid_alert.uuid).exists())
def test_service_project_link_alert_is_not_returned_when_it_is_related_to_another_project(self):
not_owned_alert = logging_factories.AlertFactory(scope=factories.TestServiceProjectLinkFactory())
spl = factories.TestServiceProjectLinkFactory(project=self.project)
owned_alert = logging_factories.AlertFactory(scope=spl)
result = self._make_aggregate_request('project', self.project.uuid.hex)
self.assertTrue(result.filter(uuid=owned_alert.uuid).exists())
self.assertFalse(result.filter(uuid=not_owned_alert.uuid).exists())
def _make_aggregate_request(self, aggregate_by, uuid):
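        # build a minimal mock of a DRF-style request carrying the
        # ``aggregate`` and ``uuid`` query parameters the filter reads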
request = mock.Mock()
request.query_params = {
'aggregate': aggregate_by,
'uuid': uuid,
}
return self.sut.filter(request, self.queryset, None)
|
|
acb985cf87917d7b29be517a5c5e2fd5285bebe1
|
py/count-binary-substrings.py
|
py/count-binary-substrings.py
|
from itertools import groupby
class Solution(object):
def countBinarySubstrings(self, s):
"""
:type s: str
:rtype: int
"""
prev = 0
ans = 0
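        # groupby splits s into runs of equal characters; each adjacent
        # pair of runs with lengths (prev, l) contributes min(prev, l)
        # substrings with equal, contiguous counts of 0s and 1s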
for k, g in groupby(s):
l = len(list(g))
ans += min(l, prev)
prev = l
return ans
|
Add py solution for 696. Count Binary Substrings
|
Add py solution for 696. Count Binary Substrings
696. Count Binary Substrings: https://leetcode.com/problems/count-binary-substrings/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 696. Count Binary Substrings
696. Count Binary Substrings: https://leetcode.com/problems/count-binary-substrings/
|
from itertools import groupby
class Solution(object):
def countBinarySubstrings(self, s):
"""
:type s: str
:rtype: int
"""
prev = 0
ans = 0
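        # groupby splits s into runs of equal characters; each adjacent
        # pair of runs with lengths (prev, l) contributes min(prev, l)
        # substrings with equal, contiguous counts of 0s and 1s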
for k, g in groupby(s):
l = len(list(g))
ans += min(l, prev)
prev = l
return ans
|
<commit_before><commit_msg>Add py solution for 696. Count Binary Substrings
696. Count Binary Substrings: https://leetcode.com/problems/count-binary-substrings/<commit_after>
|
from itertools import groupby
class Solution(object):
def countBinarySubstrings(self, s):
"""
:type s: str
:rtype: int
"""
prev = 0
ans = 0
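        # groupby splits s into runs of equal characters; each adjacent
        # pair of runs with lengths (prev, l) contributes min(prev, l)
        # substrings with equal, contiguous counts of 0s and 1s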
for k, g in groupby(s):
l = len(list(g))
ans += min(l, prev)
prev = l
return ans
|
Add py solution for 696. Count Binary Substrings
696. Count Binary Substrings: https://leetcode.com/problems/count-binary-substrings/from itertools import groupby
class Solution(object):
def countBinarySubstrings(self, s):
"""
:type s: str
:rtype: int
"""
prev = 0
ans = 0
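        # groupby splits s into runs of equal characters; each adjacent
        # pair of runs with lengths (prev, l) contributes min(prev, l)
        # substrings with equal, contiguous counts of 0s and 1s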
for k, g in groupby(s):
l = len(list(g))
ans += min(l, prev)
prev = l
return ans
|
<commit_before><commit_msg>Add py solution for 696. Count Binary Substrings
696. Count Binary Substrings: https://leetcode.com/problems/count-binary-substrings/<commit_after>from itertools import groupby
class Solution(object):
def countBinarySubstrings(self, s):
"""
:type s: str
:rtype: int
"""
prev = 0
ans = 0
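        # groupby splits s into runs of equal characters; each adjacent
        # pair of runs with lengths (prev, l) contributes min(prev, l)
        # substrings with equal, contiguous counts of 0s and 1s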
for k, g in groupby(s):
l = len(list(g))
ans += min(l, prev)
prev = l
return ans
|
|
48cd3380c23cc7a41917d281084c74420cd7b4f1
|
tests/test_pipeline_mnaseseq.py
|
tests/test_pipeline_mnaseseq.py
|
"""
.. Copyright 2017 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import pytest # pylint: disable=unused-import
from process_genome import process_genome
from process_mnaseseq import process_mnaseseq
@pytest.mark.mnaseseq
@pytest.mark.pipeline
def test_mnaseseq_pipeline():
"""
Test case to ensure that the MNase-seq pipeline code works.
Running the pipeline with the test data from the command line:
.. code-block:: none
runcompss \
--lang=python \
--library_path=${HOME}/bin \
--pythonpath=/<pyenv_virtenv_dir>/lib/python2.7/site-packages/ \
--log_level=debug \
        process_mnaseseq.py \
--taxon_id 9606 \
--genome /<dataset_dir>/Human.GCA_000001405.22.fasta \
--assembly GRCh38 \
--file /<dataset_dir>/DRR000150.22.fastq
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
genome_handle = process_genome()
genome_files, genome_meta = genome_handle.run(
[resource_path + 'inps.Mouse.GRCm38.fasta'],
{'assembly' : 'GRCm38'},
[]
)
files = [
resource_path + 'inps.Mouse.GRCm38.fasta'
]
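    # genome_files[6:11] is a positional slice of the genome pipeline's
    # output list (presumably the aligner index files; adjust if the
    # pipeline's output ordering changes)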
files += genome_files[6:11]
files += [
resource_path + 'inps.Mouse.DRR000386.fastq'
]
metadata = {
        'assembly' : 'GRCm38'
}
mnaseseq_handle = process_mnaseseq()
mnaseseq_files, mnaseseq_meta = mnaseseq_handle.run(files, metadata, [])
print(mnaseseq_files)
# Add tests for all files created
for f_out in mnaseseq_files:
print("MNASE-SEQ RESULTS FILE:", f_out)
assert os.path.isfile(f_out) is True
assert os.path.getsize(f_out) > 0
|
Test the pipeline code for the MNase-seq pipeline
|
Test the pipeline code for the MNase-seq pipeline
|
Python
|
apache-2.0
|
Multiscale-Genomics/mg-process-fastq,Multiscale-Genomics/mg-process-fastq,Multiscale-Genomics/mg-process-fastq
|
Test the pipeline code for the MNase-seq pipeline
|
"""
.. Copyright 2017 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import pytest # pylint: disable=unused-import
from process_genome import process_genome
from process_mnaseseq import process_mnaseseq
@pytest.mark.mnaseseq
@pytest.mark.pipeline
def test_mnaseseq_pipeline():
"""
Test case to ensure that the MNase-seq pipeline code works.
Running the pipeline with the test data from the command line:
.. code-block:: none
runcompss \
--lang=python \
--library_path=${HOME}/bin \
--pythonpath=/<pyenv_virtenv_dir>/lib/python2.7/site-packages/ \
--log_level=debug \
        process_mnaseseq.py \
--taxon_id 9606 \
--genome /<dataset_dir>/Human.GCA_000001405.22.fasta \
--assembly GRCh38 \
--file /<dataset_dir>/DRR000150.22.fastq
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
genome_handle = process_genome()
genome_files, genome_meta = genome_handle.run(
[resource_path + 'inps.Mouse.GRCm38.fasta'],
{'assembly' : 'GRCm38'},
[]
)
files = [
resource_path + 'inps.Mouse.GRCm38.fasta'
]
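    # genome_files[6:11] is a positional slice of the genome pipeline's
    # output list (presumably the aligner index files; adjust if the
    # pipeline's output ordering changes)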
files += genome_files[6:11]
files += [
resource_path + 'inps.Mouse.DRR000386.fastq'
]
metadata = {
        'assembly' : 'GRCm38'
}
mnaseseq_handle = process_mnaseseq()
mnaseseq_files, mnaseseq_meta = mnaseseq_handle.run(files, metadata, [])
print(mnaseseq_files)
# Add tests for all files created
for f_out in mnaseseq_files:
print("MNASE-SEQ RESULTS FILE:", f_out)
assert os.path.isfile(f_out) is True
assert os.path.getsize(f_out) > 0
|
<commit_before><commit_msg>Test the pipeline code for the MNase-seq pipeline<commit_after>
|
"""
.. Copyright 2017 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import pytest # pylint: disable=unused-import
from process_genome import process_genome
from process_mnaseseq import process_mnaseseq
@pytest.mark.mnaseseq
@pytest.mark.pipeline
def test_mnaseseq_pipeline():
"""
Test case to ensure that the MNase-seq pipeline code works.
Running the pipeline with the test data from the command line:
.. code-block:: none
runcompss \
--lang=python \
--library_path=${HOME}/bin \
--pythonpath=/<pyenv_virtenv_dir>/lib/python2.7/site-packages/ \
--log_level=debug \
        process_mnaseseq.py \
--taxon_id 9606 \
--genome /<dataset_dir>/Human.GCA_000001405.22.fasta \
--assembly GRCh38 \
--file /<dataset_dir>/DRR000150.22.fastq
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
genome_handle = process_genome()
genome_files, genome_meta = genome_handle.run(
[resource_path + 'inps.Mouse.GRCm38.fasta'],
{'assembly' : 'GRCm38'},
[]
)
files = [
resource_path + 'inps.Mouse.GRCm38.fasta'
]
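    # genome_files[6:11] is a positional slice of the genome pipeline's
    # output list (presumably the aligner index files; adjust if the
    # pipeline's output ordering changes)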
files += genome_files[6:11]
files += [
resource_path + 'inps.Mouse.DRR000386.fastq'
]
metadata = {
        'assembly' : 'GRCm38'
}
mnaseseq_handle = process_mnaseseq()
mnaseseq_files, mnaseseq_meta = mnaseseq_handle.run(files, metadata, [])
print(mnaseseq_files)
# Add tests for all files created
for f_out in mnaseseq_files:
print("MNASE-SEQ RESULTS FILE:", f_out)
assert os.path.isfile(f_out) is True
assert os.path.getsize(f_out) > 0
|
Test the pipeline code for the MNase-seq pipeline"""
.. Copyright 2017 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import pytest # pylint: disable=unused-import
from process_genome import process_genome
from process_mnaseseq import process_mnaseseq
@pytest.mark.mnaseseq
@pytest.mark.pipeline
def test_mnaseseq_pipeline():
"""
Test case to ensure that the MNase-seq pipeline code works.
Running the pipeline with the test data from the command line:
.. code-block:: none
runcompss \
--lang=python \
--library_path=${HOME}/bin \
--pythonpath=/<pyenv_virtenv_dir>/lib/python2.7/site-packages/ \
--log_level=debug \
        process_mnaseseq.py \
--taxon_id 9606 \
--genome /<dataset_dir>/Human.GCA_000001405.22.fasta \
--assembly GRCh38 \
--file /<dataset_dir>/DRR000150.22.fastq
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
genome_handle = process_genome()
genome_files, genome_meta = genome_handle.run(
[resource_path + 'inps.Mouse.GRCm38.fasta'],
{'assembly' : 'GRCm38'},
[]
)
files = [
resource_path + 'inps.Mouse.GRCm38.fasta'
]
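    # genome_files[6:11] is a positional slice of the genome pipeline's
    # output list (presumably the aligner index files; adjust if the
    # pipeline's output ordering changes)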
files += genome_files[6:11]
files += [
resource_path + 'inps.Mouse.DRR000386.fastq'
]
metadata = {
        'assembly' : 'GRCm38'
}
mnaseseq_handle = process_mnaseseq()
mnaseseq_files, mnaseseq_meta = mnaseseq_handle.run(files, metadata, [])
print(mnaseseq_files)
# Add tests for all files created
for f_out in mnaseseq_files:
print("MNASE-SEQ RESULTS FILE:", f_out)
assert os.path.isfile(f_out) is True
assert os.path.getsize(f_out) > 0
|
<commit_before><commit_msg>Test the pipeline code for the MNase-seq pipeline<commit_after>"""
.. Copyright 2017 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import pytest # pylint: disable=unused-import
from process_genome import process_genome
from process_mnaseseq import process_mnaseseq
@pytest.mark.mnaseseq
@pytest.mark.pipeline
def test_mnaseseq_pipeline():
"""
Test case to ensure that the MNase-seq pipeline code works.
Running the pipeline with the test data from the command line:
.. code-block:: none
runcompss \
--lang=python \
--library_path=${HOME}/bin \
--pythonpath=/<pyenv_virtenv_dir>/lib/python2.7/site-packages/ \
--log_level=debug \
        process_mnaseseq.py \
--taxon_id 9606 \
--genome /<dataset_dir>/Human.GCA_000001405.22.fasta \
--assembly GRCh38 \
--file /<dataset_dir>/DRR000150.22.fastq
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
genome_handle = process_genome()
genome_files, genome_meta = genome_handle.run(
[resource_path + 'inps.Mouse.GRCm38.fasta'],
{'assembly' : 'GRCm38'},
[]
)
files = [
resource_path + 'inps.Mouse.GRCm38.fasta'
]
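    # genome_files[6:11] is a positional slice of the genome pipeline's
    # output list (presumably the aligner index files; adjust if the
    # pipeline's output ordering changes)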
files += genome_files[6:11]
files += [
resource_path + 'inps.Mouse.DRR000386.fastq'
]
metadata = {
        'assembly' : 'GRCm38'
}
mnaseseq_handle = process_mnaseseq()
mnaseseq_files, mnaseseq_meta = mnaseseq_handle.run(files, metadata, [])
print(mnaseseq_files)
# Add tests for all files created
for f_out in mnaseseq_files:
print("MNASE-SEQ RESULTS FILE:", f_out)
assert os.path.isfile(f_out) is True
assert os.path.getsize(f_out) > 0
|
|
558d3e45e8e7c81ab46b560e6c1beb791f1e935f
|
derrida/__init__.py
|
derrida/__init__.py
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 3, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
Remove dev suffix from version
|
Remove dev suffix from version
|
Python
|
apache-2.0
|
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Remove dev suffix from version
|
__version_info__ = (1, 3, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Remove dev suffix from version<commit_after>
|
__version_info__ = (1, 3, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Remove dev suffix from version__version_info__ = (1, 3, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Remove dev suffix from version<commit_after>__version_info__ = (1, 3, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
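The version logic repeated in this record dot-joins every component except the last and dash-appends the last one only when it is set, so the commit's change from 'dev' to None simply drops the suffix. A standalone sketch of that behavior (the helper name format_version is mine, not from the repository):

def format_version(version_info):
    # Dot-connect all but the last component; dash-connect the last if not None.
    version = '.'.join(str(i) for i in version_info[:-1])
    if version_info[-1] is not None:
        version += '-%s' % (version_info[-1],)
    return version

assert format_version((1, 3, 0, 'dev')) == '1.3.0-dev'
assert format_version((1, 3, 0, None)) == '1.3.0'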
7aa7d6222256b20fd256c27479e54594fc646405
|
tests/test_cli.py
|
tests/test_cli.py
|
from tomso import cli
import unittest
tmpfile = 'data/tmpfile'
class TestCLIFunctions(unittest.TestCase):
def setUp(self):
self.parser = cli.get_parser()
def test_info_guess_format(self):
filenames = ['data/mesa.%s' % ext for ext in
['amdl', 'fgong', 'gyre', 'history', 'profile']]
filenames.extend(['data/modelS.agsm'])
for filename in filenames:
args = self.parser.parse_args(['info', filename])
cli.info(args)
def test_info_explicit_format(self):
        filename_formats = [('data/stars.out', 'stars-summ'),
                            ('data/stars.plot', 'stars-plot')]
for filename, format in filename_formats:
args = self.parser.parse_args(['info', filename, '-F', format])
cli.info(args)
def test_convert(self):
args = self.parser.parse_args(['convert', 'data/modelS.fgong', '-f',
'fgong', '-t', 'gyre', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'gyre', '-t', 'amdl', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'amdl', '-t', 'fgong', '-o', tmpfile])
cli.convert(args)
# TODO: test for failure on incorrect formats
|
Add some basic tests of command-line interface
|
Add some basic tests of command-line interface
|
Python
|
mit
|
warrickball/tomso
|
Add some basic tests of command-line interface
|
from tomso import cli
import unittest
tmpfile = 'data/tmpfile'
class TestCLIFunctions(unittest.TestCase):
def setUp(self):
self.parser = cli.get_parser()
def test_info_guess_format(self):
filenames = ['data/mesa.%s' % ext for ext in
['amdl', 'fgong', 'gyre', 'history', 'profile']]
filenames.extend(['data/modelS.agsm'])
for filename in filenames:
args = self.parser.parse_args(['info', filename])
cli.info(args)
def test_info_explicit_format(self):
        filename_formats = [('data/stars.out', 'stars-summ'),
                            ('data/stars.plot', 'stars-plot')]
for filename, format in filename_formats:
args = self.parser.parse_args(['info', filename, '-F', format])
cli.info(args)
def test_convert(self):
args = self.parser.parse_args(['convert', 'data/modelS.fgong', '-f',
'fgong', '-t', 'gyre', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'gyre', '-t', 'amdl', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'amdl', '-t', 'fgong', '-o', tmpfile])
cli.convert(args)
# TODO: test for failure on incorrect formats
|
<commit_before><commit_msg>Add some basic tests of command-line interface<commit_after>
|
from tomso import cli
import unittest
tmpfile = 'data/tmpfile'
class TestCLIFunctions(unittest.TestCase):
def setUp(self):
self.parser = cli.get_parser()
def test_info_guess_format(self):
filenames = ['data/mesa.%s' % ext for ext in
['amdl', 'fgong', 'gyre', 'history', 'profile']]
filenames.extend(['data/modelS.agsm'])
for filename in filenames:
args = self.parser.parse_args(['info', filename])
cli.info(args)
def test_info_explicit_format(self):
        filename_formats = [('data/stars.out', 'stars-summ'),
                            ('data/stars.plot', 'stars-plot')]
for filename, format in filename_formats:
args = self.parser.parse_args(['info', filename, '-F', format])
cli.info(args)
def test_convert(self):
args = self.parser.parse_args(['convert', 'data/modelS.fgong', '-f',
'fgong', '-t', 'gyre', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'gyre', '-t', 'amdl', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'amdl', '-t', 'fgong', '-o', tmpfile])
cli.convert(args)
# TODO: test for failure on incorrect formats
|
Add some basic tests of command-line interfacefrom tomso import cli
import unittest
tmpfile = 'data/tmpfile'
class TestCLIFunctions(unittest.TestCase):
def setUp(self):
self.parser = cli.get_parser()
def test_info_guess_format(self):
filenames = ['data/mesa.%s' % ext for ext in
['amdl', 'fgong', 'gyre', 'history', 'profile']]
filenames.extend(['data/modelS.agsm'])
for filename in filenames:
args = self.parser.parse_args(['info', filename])
cli.info(args)
def test_info_explicit_format(self):
        filename_formats = [('data/stars.out', 'stars-summ'),
                            ('data/stars.plot', 'stars-plot')]
for filename, format in filename_formats:
args = self.parser.parse_args(['info', filename, '-F', format])
cli.info(args)
def test_convert(self):
args = self.parser.parse_args(['convert', 'data/modelS.fgong', '-f',
'fgong', '-t', 'gyre', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'gyre', '-t', 'amdl', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'amdl', '-t', 'fgong', '-o', tmpfile])
cli.convert(args)
# TODO: test for failure on incorrect formats
|
<commit_before><commit_msg>Add some basic tests of command-line interface<commit_after>from tomso import cli
import unittest
tmpfile = 'data/tmpfile'
class TestCLIFunctions(unittest.TestCase):
def setUp(self):
self.parser = cli.get_parser()
def test_info_guess_format(self):
filenames = ['data/mesa.%s' % ext for ext in
['amdl', 'fgong', 'gyre', 'history', 'profile']]
filenames.extend(['data/modelS.agsm'])
for filename in filenames:
args = self.parser.parse_args(['info', filename])
cli.info(args)
def test_info_explicit_format(self):
        filename_formats = [('data/stars.out', 'stars-summ'),
                            ('data/stars.plot', 'stars-plot')]
for filename, format in filename_formats:
args = self.parser.parse_args(['info', filename, '-F', format])
cli.info(args)
def test_convert(self):
args = self.parser.parse_args(['convert', 'data/modelS.fgong', '-f',
'fgong', '-t', 'gyre', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'gyre', '-t', 'amdl', '-o', tmpfile])
cli.convert(args)
args = self.parser.parse_args(['convert', tmpfile, '-f',
'amdl', '-t', 'fgong', '-o', tmpfile])
cli.convert(args)
# TODO: test for failure on incorrect formats
|
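The tests above drive the command-line interface without spawning subprocesses: they build argument lists with the package's own parser and call the handler functions directly. A minimal sketch of that pattern with plain argparse (subcommand and option names below are illustrative, not tomso's actual interface):

import argparse

def get_parser():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='command')
    info = subparsers.add_parser('info')
    info.add_argument('filename')
    info.add_argument('-F', '--format')
    return parser

# Parse a synthetic argument list exactly as the tests do.
args = get_parser().parse_args(['info', 'data/mesa.fgong', '-F', 'fgong'])
assert args.command == 'info' and args.format == 'fgong'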
|
12c4b353f4f4ae4fb52a1c05862057f1ef36314b
|
addons/membership/wizard/__init__.py
|
addons/membership/wizard/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
import membership_unpaid_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Remove unpaid invoice import from init file
|
[FIX] membership: Remove unpaid invoice import from init file
bzr revid: mra@mra-laptop-20101006072219-twq77xlem3d69rg7
|
Python
|
agpl-3.0
|
ShineFan/odoo,optima-ict/odoo,Ernesto99/odoo,papouso/odoo,avoinsystems/odoo,BT-ojossen/odoo,joariasl/odoo,Ernesto99/odoo,numerigraphe/odoo,JGarcia-Panach/odoo,fgesora/odoo,bplancher/odoo,tvibliani/odoo,cedk/odoo,OpusVL/odoo,Endika/OpenUpgrade,codekaki/odoo,mszewczy/odoo,OSSESAC/odoopubarquiluz,storm-computers/odoo,Endika/odoo,papouso/odoo,shingonoide/odoo,doomsterinc/odoo,hassoon3/odoo,credativUK/OCB,fevxie/odoo,cpyou/odoo,fjbatresv/odoo,joshuajan/odoo,wangjun/odoo,OSSESAC/odoopubarquiluz,rowemoore/odoo,synconics/odoo,rgeleta/odoo,shivam1111/odoo,tvibliani/odoo,ecosoft-odoo/odoo,savoirfairelinux/odoo,MarcosCommunity/odoo,shingonoide/odoo,nitinitprof/odoo,ClearCorp-dev/odoo,hanicker/odoo,mustafat/odoo-1,bealdav/OpenUpgrade,hassoon3/odoo,bkirui/odoo,OpenPymeMx/OCB,laslabs/odoo,frouty/odoo_oph,charbeljc/OCB,jpshort/odoo,alhashash/odoo,xzYue/odoo,abdellatifkarroum/odoo,BT-rmartin/odoo,rgeleta/odoo,luiseduardohdbackup/odoo,nuuuboo/odoo,ClearCorp-dev/odoo,damdam-s/OpenUpgrade,inspyration/odoo,dkubiak789/odoo,oasiswork/odoo,dkubiak789/odoo,x111ong/odoo,grap/OpenUpgrade,ShineFan/odoo,pedrobaeza/OpenUpgrade,tangyiyong/odoo,JonathanStein/odoo,deKupini/erp,VitalPet/odoo,oasiswork/odoo,apanju/GMIO_Odoo,brijeshkesariya/odoo,factorlibre/OCB,nhomar/odoo,jeasoft/odoo,ThinkOpen-Solutions/odoo,naousse/odoo,rschnapka/odoo,pedrobaeza/OpenUpgrade,realsaiko/odoo,CubicERP/odoo,chiragjogi/odoo,leoliujie/odoo,RafaelTorrealba/odoo,VitalPet/odoo,dsfsdgsbngfggb/odoo,Kilhog/odoo,OpenUpgrade/OpenUpgrade,bplancher/odoo,pedrobaeza/odoo,mvaled/OpenUpgrade,nhomar/odoo,hoatle/odoo,arthru/OpenUpgrade,highco-groupe/odoo,Grirrane/odoo,MarcosCommunity/odoo,agrista/odoo-saas,guerrerocarlos/odoo,Drooids/odoo,omprakasha/odoo,OpenUpgrade-dev/OpenUpgrade,tangyiyong/odoo,collex100/odoo,charbeljc/OCB,dariemp/odoo,syci/OCB,andreparames/odoo,hoatle/odoo,apocalypsebg/odoo,BT-fgarbely/odoo,jesramirez/odoo,fdvarela/odoo8,colinnewell/odoo,stonegithubs/odoo,MarcosCommunity/odoo,Antiun/odoo,jeasoft/odoo,rgeleta/odoo,spadae22/odoo,mszewczy/odoo,stonegithubs/odoo,sadleader/odoo,Maspear/odoo,alexcuellar/odoo,GauravSahu/odoo,janocat/odoo,dllsf/odootest,jpshort/odoo,stephen144/odoo,christophlsa/odoo,odootr/odoo,xzYue/odoo,hopeall/odoo,shaufi/odoo,tangyiyong/odoo,glovebx/odoo,feroda/odoo,RafaelTorrealba/odoo,highco-groupe/odoo,JonathanStein/odoo,0k/odoo,Adel-Magebinary/odoo,windedge/odoo,dllsf/odootest,florentx/OpenUpgrade,Adel-Magebinary/odoo,Nick-OpusVL/odoo,AuyaJackie/odoo,kybriainfotech/iSocioCRM,fjbatresv/odoo,glovebx/odoo,BT-astauder/odoo,gsmartway/odoo,blaggacao/OpenUpgrade,bakhtout/odoo-educ,factorlibre/OCB,vrenaville/ngo-addons-backport,Kilhog/odoo,patmcb/odoo,dgzurita/odoo,CopeX/odoo,nhomar/odoo-mirror,optima-ict/odoo,provaleks/o8,rahuldhote/odoo,rubencabrera/odoo,hip-odoo/odoo,damdam-s/OpenUpgrade,luiseduardohdbackup/odoo,tarzan0820/odoo,glovebx/odoo,Nick-OpusVL/odoo,credativUK/OCB,nhomar/odoo,Drooids/odoo,papouso/odoo,tvtsoft/odoo8,salaria/odoo,cloud9UG/odoo,lgscofield/odoo,ClearCorp-dev/odoo,GauravSahu/odoo,elmerdpadilla/iv,0k/OpenUpgrade,factorlibre/OCB,tinkerthaler/odoo,hbrunn/OpenUpgrade,dgzurita/odoo,hopeall/odoo,Ichag/odoo,tinkerthaler/odoo,sysadminmatmoz/OCB,Ichag/odoo,Antiun/odoo,janocat/odoo,eino-makitalo/odoo,oliverhr/odoo,gsmartway/odoo,slevenhagen/odoo,srimai/odoo,csrocha/OpenUpgrade,nagyistoce/odoo-dev-odoo,incaser/odoo-odoo,PongPi/isl-odoo,ChanduERP/odoo,minhtuancn/odoo,syci/OCB,dfang/odoo,jpshort/odoo,naousse/odoo,Gitlab11/odoo,Daniel-CA/odoo,fdvarela/odoo8,ubic135/odoo-design,nuuuboo/odoo,sv-dev1/odoo,syci/OCB,apocalypseb
g/odoo,nhomar/odoo,abstract-open-solutions/OCB,bakhtout/odoo-educ,steedos/odoo,ojengwa/odoo,chiragjogi/odoo,Drooids/odoo,hifly/OpenUpgrade,pplatek/odoo,sebalix/OpenUpgrade,datenbetrieb/odoo,0k/OpenUpgrade,JGarcia-Panach/odoo,sergio-incaser/odoo,fgesora/odoo,bealdav/OpenUpgrade,jeasoft/odoo,simongoffin/website_version,dalegregory/odoo,deKupini/erp,sv-dev1/odoo,cedk/odoo,windedge/odoo,gvb/odoo,windedge/odoo,incaser/odoo-odoo,apocalypsebg/odoo,0k/odoo,markeTIC/OCB,leorochael/odoo,frouty/odoogoeen,srsman/odoo,Bachaco-ve/odoo,colinnewell/odoo,apanju/odoo,lgscofield/odoo,dezynetechnologies/odoo,tinkhaven-organization/odoo,sebalix/OpenUpgrade,sve-odoo/odoo,colinnewell/odoo,mmbtba/odoo,sadleader/odoo,savoirfairelinux/OpenUpgrade,slevenhagen/odoo-npg,NeovaHealth/odoo,BT-rmartin/odoo,doomsterinc/odoo,tangyiyong/odoo,nitinitprof/odoo,fevxie/odoo,demon-ru/iml-crm,nuncjo/odoo,bealdav/OpenUpgrade,odoo-turkiye/odoo,ovnicraft/odoo,Noviat/odoo,gavin-feng/odoo,ecosoft-odoo/odoo,virgree/odoo,patmcb/odoo,bwrsandman/OpenUpgrade,ingadhoc/odoo,avoinsystems/odoo,VitalPet/odoo,OpenUpgrade/OpenUpgrade,RafaelTorrealba/odoo,gsmartway/odoo,FlorianLudwig/odoo,nagyistoce/odoo-dev-odoo,ShineFan/odoo,Endika/OpenUpgrade,aviciimaxwell/odoo,nhomar/odoo-mirror,alhashash/odoo,hbrunn/OpenUpgrade,frouty/odoo_oph,numerigraphe/odoo,csrocha/OpenUpgrade,collex100/odoo,NeovaHealth/odoo,zchking/odoo,ygol/odoo,stephen144/odoo,funkring/fdoo,cpyou/odoo,leorochael/odoo,matrixise/odoo,SAM-IT-SA/odoo,hifly/OpenUpgrade,janocat/odoo,janocat/odoo,Maspear/odoo,thanhacun/odoo,javierTerry/odoo,n0m4dz/odoo,dfang/odoo,vnsofthe/odoo,gavin-feng/odoo,guewen/OpenUpgrade,patmcb/odoo,jeasoft/odoo,jusdng/odoo,jolevq/odoopub,addition-it-solutions/project-all,rowemoore/odoo,BT-rmartin/odoo,damdam-s/OpenUpgrade,xujb/odoo,bkirui/odoo,dsfsdgsbngfggb/odoo,optima-ict/odoo,JonathanStein/odoo,Bachaco-ve/odoo,lombritz/odoo,lightcn/odoo,nexiles/odoo,osvalr/odoo,kifcaliph/odoo,cedk/odoo,RafaelTorrealba/odoo,laslabs/odoo,ThinkOpen-Solutions/odoo,bwrsandman/OpenUpgrade,gdgellatly/OCB1,luiseduardohdbackup/odoo,leorochael/odoo,dkubiak789/odoo,NL66278/OCB,VitalPet/odoo,hmen89/odoo,xzYue/odoo,bwrsandman/OpenUpgrade,Danisan/odoo-1,lsinfo/odoo,juanalfonsopr/odoo,OpenPymeMx/OCB,Endika/odoo,aviciimaxwell/odoo,doomsterinc/odoo,camptocamp/ngo-addons-backport,leoliujie/odoo,bakhtout/odoo-educ,grap/OCB,takis/odoo,storm-computers/odoo,Danisan/odoo-1,guewen/OpenUpgrade,hanicker/odoo,mlaitinen/odoo,eino-makitalo/odoo,tinkhaven-organization/odoo,nitinitprof/odoo,hoatle/odoo,leoliujie/odoo,OpusVL/odoo,matrixise/odoo,SerpentCS/odoo,ramitalat/odoo,xujb/odoo,fjbatresv/odoo,idncom/odoo,andreparames/odoo,bobisme/odoo,ygol/odoo,nuncjo/odoo,waytai/odoo,spadae22/odoo,shaufi/odoo,Drooids/odoo,frouty/odoogoeen,odootr/odoo,waytai/odoo,markeTIC/OCB,alexteodor/odoo,blaggacao/OpenUpgrade,highco-groupe/odoo,sysadminmatmoz/OCB,Kilhog/odoo,charbeljc/OCB,RafaelTorrealba/odoo,takis/odoo,kirca/OpenUpgrade,OpenPymeMx/OCB,factorlibre/OCB,numerigraphe/odoo,avoinsystems/odoo,avoinsystems/odoo,codekaki/odoo,oihane/odoo,diagramsoftware/odoo,gorjuce/odoo,synconics/odoo,fuhongliang/odoo,florentx/OpenUpgrade,prospwro/odoo,rahuldhote/odoo,jpshort/odoo,blaggacao/OpenUpgrade,inspyration/odoo,feroda/odoo,takis/odoo,srimai/odoo,doomsterinc/odoo,arthru/OpenUpgrade,alexcuellar/odoo,alexteodor/odoo,xzYue/odoo,rschnapka/odoo,andreparames/odoo,sebalix/OpenUpgrade,lombritz/odoo,gavin-feng/odoo,hopeall/odoo,Ernesto99/odoo,janocat/odoo,tvibliani/odoo,prospwro/odoo,pplatek/odoo,hassoon3/odoo,podemos-info/odoo,dllsf/odootest,odoo
-turkiye/odoo,jiangzhixiao/odoo,Maspear/odoo,rgeleta/odoo,tarzan0820/odoo,rgeleta/odoo,jiangzhixiao/odoo,jfpla/odoo,NL66278/OCB,rschnapka/odoo,rahuldhote/odoo,slevenhagen/odoo,demon-ru/iml-crm,bealdav/OpenUpgrade,vrenaville/ngo-addons-backport,Bachaco-ve/odoo,factorlibre/OCB,ujjwalwahi/odoo,havt/odoo,provaleks/o8,lsinfo/odoo,syci/OCB,apanju/GMIO_Odoo,lsinfo/odoo,dllsf/odootest,sebalix/OpenUpgrade,bobisme/odoo,frouty/odoogoeen,mlaitinen/odoo,cloud9UG/odoo,hoatle/odoo,csrocha/OpenUpgrade,osvalr/odoo,JCA-Developpement/Odoo,KontorConsulting/odoo,idncom/odoo,hbrunn/OpenUpgrade,alqfahad/odoo,osvalr/odoo,ygol/odoo,ojengwa/odoo,damdam-s/OpenUpgrade,oihane/odoo,0k/OpenUpgrade,guewen/OpenUpgrade,storm-computers/odoo,kirca/OpenUpgrade,ovnicraft/odoo,rdeheele/odoo,spadae22/odoo,omprakasha/odoo,abdellatifkarroum/odoo,CubicERP/odoo,sergio-incaser/odoo,tinkerthaler/odoo,janocat/odoo,datenbetrieb/odoo,ApuliaSoftware/odoo,rowemoore/odoo,CubicERP/odoo,ramadhane/odoo,jiachenning/odoo,dezynetechnologies/odoo,naousse/odoo,Daniel-CA/odoo,guerrerocarlos/odoo,markeTIC/OCB,odooindia/odoo,eino-makitalo/odoo,jiachenning/odoo,Noviat/odoo,jolevq/odoopub,mvaled/OpenUpgrade,fdvarela/odoo8,kybriainfotech/iSocioCRM,nexiles/odoo,slevenhagen/odoo-npg,dfang/odoo,klunwebale/odoo,sinbazhou/odoo,salaria/odoo,jaxkodex/odoo,jiangzhixiao/odoo,jeasoft/odoo,leorochael/odoo,tarzan0820/odoo,camptocamp/ngo-addons-backport,gdgellatly/OCB1,tinkhaven-organization/odoo,patmcb/odoo,vrenaville/ngo-addons-backport,poljeff/odoo,bakhtout/odoo-educ,Antiun/odoo,leoliujie/odoo,lightcn/odoo,chiragjogi/odoo,jiangzhixiao/odoo,sinbazhou/odoo,poljeff/odoo,shaufi10/odoo,dariemp/odoo,mszewczy/odoo,leoliujie/odoo,synconics/odoo,dezynetechnologies/odoo,jaxkodex/odoo,xzYue/odoo,massot/odoo,vnsofthe/odoo,mustafat/odoo-1,Danisan/odoo-1,ingadhoc/odoo,makinacorpus/odoo,shivam1111/odoo,jaxkodex/odoo,jfpla/odoo,hip-odoo/odoo,sinbazhou/odoo,waytai/odoo,OSSESAC/odoopubarquiluz,lombritz/odoo,ecosoft-odoo/odoo,abenzbiria/clients_odoo,hmen89/odoo,optima-ict/odoo,tinkerthaler/odoo,mkieszek/odoo,nitinitprof/odoo,KontorConsulting/odoo,avoinsystems/odoo,slevenhagen/odoo-npg,hip-odoo/odoo,ccomb/OpenUpgrade,doomsterinc/odoo,rschnapka/odoo,makinacorpus/odoo,zchking/odoo,Noviat/odoo,CatsAndDogsbvba/odoo,nexiles/odoo,optima-ict/odoo,rubencabrera/odoo,osvalr/odoo,provaleks/o8,JonathanStein/odoo,chiragjogi/odoo,pedrobaeza/odoo,synconics/odoo,OpenUpgrade/OpenUpgrade,sebalix/OpenUpgrade,grap/OpenUpgrade,rubencabrera/odoo,javierTerry/odoo,blaggacao/OpenUpgrade,oliverhr/odoo,SerpentCS/odoo,javierTerry/odoo,dariemp/odoo,ramitalat/odoo,alqfahad/odoo,alqfahad/odoo,apanju/odoo,dariemp/odoo,erkrishna9/odoo,OpenUpgrade-dev/OpenUpgrade,deKupini/erp,odoousers2014/odoo,n0m4dz/odoo,collex100/odoo,salaria/odoo,bkirui/odoo,fjbatresv/odoo,feroda/odoo,ujjwalwahi/odoo,andreparames/odoo,sinbazhou/odoo,thanhacun/odoo,ThinkOpen-Solutions/odoo,numerigraphe/odoo,alexcuellar/odoo,rubencabrera/odoo,ThinkOpen-Solutions/odoo,FlorianLudwig/odoo,lgscofield/odoo,acshan/odoo,ShineFan/odoo,brijeshkesariya/odoo,nuuuboo/odoo,MarcosCommunity/odoo,hbrunn/OpenUpgrade,diagramsoftware/odoo,pedrobaeza/odoo,MarcosCommunity/odoo,pedrobaeza/OpenUpgrade,GauravSahu/odoo,kifcaliph/odoo,0k/OpenUpgrade,havt/odoo,prospwro/odoo,acshan/odoo,SerpentCS/odoo,bguillot/OpenUpgrade,thanhacun/odoo,VielSoft/odoo,matrixise/odoo,numerigraphe/odoo,NeovaHealth/odoo,frouty/odoogoeen,hassoon3/odoo,shaufi10/odoo,rdeheele/odoo,tarzan0820/odoo,hmen89/odoo,MarcosCommunity/odoo,credativUK/OCB,xujb/odoo,leorochael/odoo,javierTerry/odoo,juanalfonsopr/
odoo,gvb/odoo,mkieszek/odoo,shaufi10/odoo,shivam1111/odoo,bwrsandman/OpenUpgrade,Nowheresly/odoo,savoirfairelinux/odoo,mszewczy/odoo,simongoffin/website_version,bobisme/odoo,abstract-open-solutions/OCB,JCA-Developpement/Odoo,salaria/odoo,addition-it-solutions/project-all,sergio-incaser/odoo,poljeff/odoo,dezynetechnologies/odoo,dsfsdgsbngfggb/odoo,jiachenning/odoo,Ernesto99/odoo,Danisan/odoo-1,cysnake4713/odoo,Nowheresly/odoo,oihane/odoo,FlorianLudwig/odoo,vrenaville/ngo-addons-backport,goliveirab/odoo,VitalPet/odoo,dalegregory/odoo,Nowheresly/odoo,ramitalat/odoo,kittiu/odoo,minhtuancn/odoo,hubsaysnuaa/odoo,jusdng/odoo,Endika/OpenUpgrade,hoatle/odoo,codekaki/odoo,gdgellatly/OCB1,nuuuboo/odoo,eino-makitalo/odoo,OpenUpgrade/OpenUpgrade,slevenhagen/odoo,Kilhog/odoo,grap/OCB,OpenUpgrade-dev/OpenUpgrade,rdeheele/odoo,csrocha/OpenUpgrade,sv-dev1/odoo,BT-astauder/odoo,feroda/odoo,grap/OCB,frouty/odoo_oph,SAM-IT-SA/odoo,Nowheresly/odoo,fjbatresv/odoo,tvibliani/odoo,JonathanStein/odoo,Ichag/odoo,fgesora/odoo,arthru/OpenUpgrade,fevxie/odoo,srimai/odoo,shingonoide/odoo,nagyistoce/odoo-dev-odoo,Nick-OpusVL/odoo,makinacorpus/odoo,minhtuancn/odoo,goliveirab/odoo,odoo-turkiye/odoo,shaufi10/odoo,apocalypsebg/odoo,bwrsandman/OpenUpgrade,ygol/odoo,ChanduERP/odoo,alexteodor/odoo,bguillot/OpenUpgrade,ihsanudin/odoo,ramadhane/odoo,FlorianLudwig/odoo,steedos/odoo,erkrishna9/odoo,gvb/odoo,thanhacun/odoo,credativUK/OCB,microcom/odoo,Drooids/odoo,cdrooom/odoo,datenbetrieb/odoo,feroda/odoo,CopeX/odoo,lgscofield/odoo,ShineFan/odoo,ShineFan/odoo,tinkerthaler/odoo,elmerdpadilla/iv,Nick-OpusVL/odoo,abdellatifkarroum/odoo,oihane/odoo,savoirfairelinux/OpenUpgrade,brijeshkesariya/odoo,Adel-Magebinary/odoo,gdgellatly/OCB1,PongPi/isl-odoo,thanhacun/odoo,ecosoft-odoo/odoo,abenzbiria/clients_odoo,cedk/odoo,kittiu/odoo,QianBIG/odoo,srimai/odoo,SerpentCS/odoo,Endika/odoo,NeovaHealth/odoo,stonegithubs/odoo,GauravSahu/odoo,vnsofthe/odoo,NL66278/OCB,grap/OCB,draugiskisprendimai/odoo,prospwro/odoo,mustafat/odoo-1,storm-computers/odoo,gdgellatly/OCB1,incaser/odoo-odoo,odoo-turkiye/odoo,NeovaHealth/odoo,guewen/OpenUpgrade,juanalfonsopr/odoo,BT-fgarbely/odoo,fuselock/odoo,rowemoore/odoo,OpenUpgrade-dev/OpenUpgrade,osvalr/odoo,grap/OpenUpgrade,joshuajan/odoo,dkubiak789/odoo,odoo-turkiye/odoo,Daniel-CA/odoo,alqfahad/odoo,optima-ict/odoo,luiseduardohdbackup/odoo,javierTerry/odoo,oasiswork/odoo,florentx/OpenUpgrade,OpenPymeMx/OCB,dariemp/odoo,jeasoft/odoo,hifly/OpenUpgrade,eino-makitalo/odoo,colinnewell/odoo,savoirfairelinux/odoo,erkrishna9/odoo,alexcuellar/odoo,Noviat/odoo,FlorianLudwig/odoo,massot/odoo,srimai/odoo,dfang/odoo,Endika/odoo,wangjun/odoo,BT-astauder/odoo,bguillot/OpenUpgrade,mlaitinen/odoo,abenzbiria/clients_odoo,idncom/odoo,diagramsoftware/odoo,odootr/odoo,joariasl/odoo,dariemp/odoo,havt/odoo,fuhongliang/odoo,shaufi/odoo,fuhongliang/odoo,cloud9UG/odoo,christophlsa/odoo,mkieszek/odoo,ramitalat/odoo,CopeX/odoo,sebalix/OpenUpgrade,cdrooom/odoo,lightcn/odoo,lightcn/odoo,bakhtout/odoo-educ,gsmartway/odoo,hoatle/odoo,ccomb/OpenUpgrade,guerrerocarlos/odoo,alqfahad/odoo,TRESCLOUD/odoopub,nitinitprof/odoo,Antiun/odoo,pedrobaeza/OpenUpgrade,doomsterinc/odoo,cpyou/odoo,Kilhog/odoo,nhomar/odoo,OpenUpgrade-dev/OpenUpgrade,dariemp/odoo,oasiswork/odoo,Bachaco-ve/odoo,camptocamp/ngo-addons-backport,jfpla/odoo,lombritz/odoo,stonegithubs/odoo,draugiskisprendimai/odoo,hanicker/odoo,colinnewell/odoo,sysadminmatmoz/OCB,kittiu/odoo,rahuldhote/odoo,Gitlab11/odoo,waytai/odoo,apocalypsebg/odoo,tvtsoft/odoo8,fuselock/odoo,savoirfairelinux/odoo,leoroch
ael/odoo,sve-odoo/odoo,bwrsandman/OpenUpgrade,MarcosCommunity/odoo,dfang/odoo,ehirt/odoo,fuselock/odoo,alhashash/odoo,KontorConsulting/odoo,mmbtba/odoo,windedge/odoo,shaufi/odoo,ApuliaSoftware/odoo,ChanduERP/odoo,CubicERP/odoo,jaxkodex/odoo,OpenUpgrade/OpenUpgrade,pplatek/odoo,gvb/odoo,laslabs/odoo,abstract-open-solutions/OCB,nexiles/odoo,apanju/odoo,tarzan0820/odoo,windedge/odoo,havt/odoo,CopeX/odoo,ramadhane/odoo,ingadhoc/odoo,fevxie/odoo,laslabs/odoo,agrista/odoo-saas,kirca/OpenUpgrade,slevenhagen/odoo-npg,dalegregory/odoo,markeTIC/OCB,ccomb/OpenUpgrade,spadae22/odoo,JGarcia-Panach/odoo,CubicERP/odoo,steedos/odoo,eino-makitalo/odoo,makinacorpus/odoo,nuuuboo/odoo,dalegregory/odoo,camptocamp/ngo-addons-backport,highco-groupe/odoo,funkring/fdoo,srimai/odoo,florian-dacosta/OpenUpgrade,zchking/odoo,osvalr/odoo,Nowheresly/odoo,bobisme/odoo,rgeleta/odoo,ramadhane/odoo,TRESCLOUD/odoopub,mustafat/odoo-1,klunwebale/odoo,hanicker/odoo,BT-rmartin/odoo,NeovaHealth/odoo,tvibliani/odoo,juanalfonsopr/odoo,virgree/odoo,diagramsoftware/odoo,christophlsa/odoo,massot/odoo,naousse/odoo,xujb/odoo,addition-it-solutions/project-all,dkubiak789/odoo,bkirui/odoo,camptocamp/ngo-addons-backport,funkring/fdoo,Eric-Zhong/odoo,podemos-info/odoo,mmbtba/odoo,spadae22/odoo,CubicERP/odoo,microcom/odoo,numerigraphe/odoo,fuselock/odoo,sergio-incaser/odoo,pedrobaeza/OpenUpgrade,gorjuce/odoo,provaleks/o8,mkieszek/odoo,AuyaJackie/odoo,kybriainfotech/iSocioCRM,CatsAndDogsbvba/odoo,oliverhr/odoo,Gitlab11/odoo,klunwebale/odoo,poljeff/odoo,QianBIG/odoo,fevxie/odoo,blaggacao/OpenUpgrade,lgscofield/odoo,wangjun/odoo,slevenhagen/odoo-npg,bwrsandman/OpenUpgrade,klunwebale/odoo,n0m4dz/odoo,papouso/odoo,luiseduardohdbackup/odoo,xzYue/odoo,NL66278/OCB,rschnapka/odoo,sinbazhou/odoo,JonathanStein/odoo,BT-fgarbely/odoo,sergio-incaser/odoo,microcom/odoo,chiragjogi/odoo,podemos-info/odoo,RafaelTorrealba/odoo,guewen/OpenUpgrade,Danisan/odoo-1,ihsanudin/odoo,nexiles/odoo,PongPi/isl-odoo,fuselock/odoo,microcom/odoo,luistorresm/odoo,aviciimaxwell/odoo,markeTIC/OCB,Adel-Magebinary/odoo,Drooids/odoo,ehirt/odoo,ihsanudin/odoo,TRESCLOUD/odoopub,prospwro/odoo,abdellatifkarroum/odoo,dfang/odoo,VielSoft/odoo,vnsofthe/odoo,syci/OCB,VielSoft/odoo,mlaitinen/odoo,gdgellatly/OCB1,minhtuancn/odoo,incaser/odoo-odoo,jolevq/odoopub,addition-it-solutions/project-all,rahuldhote/odoo,brijeshkesariya/odoo,dsfsdgsbngfggb/odoo,SerpentCS/odoo,nitinitprof/odoo,tvtsoft/odoo8,kybriainfotech/iSocioCRM,ovnicraft/odoo,papouso/odoo,OpenPymeMx/OCB,florian-dacosta/OpenUpgrade,takis/odoo,elmerdpadilla/iv,mkieszek/odoo,jesramirez/odoo,mvaled/OpenUpgrade,ingadhoc/odoo,Kilhog/odoo,ApuliaSoftware/odoo,nhomar/odoo-mirror,provaleks/o8,ApuliaSoftware/odoo,tinkhaven-organization/odoo,NeovaHealth/odoo,tangyiyong/odoo,CopeX/odoo,patmcb/odoo,guerrerocarlos/odoo,ubic135/odoo-design,omprakasha/odoo,avoinsystems/odoo,sv-dev1/odoo,synconics/odoo,dalegregory/odoo,makinacorpus/odoo,mmbtba/odoo,apanju/odoo,gdgellatly/OCB1,cedk/odoo,nuncjo/odoo,realsaiko/odoo,brijeshkesariya/odoo,Adel-Magebinary/odoo,x111ong/odoo,ubic135/odoo-design,ChanduERP/odoo,salaria/odoo,addition-it-solutions/project-all,jesramirez/odoo,slevenhagen/odoo-npg,pedrobaeza/odoo,gorjuce/odoo,tvibliani/odoo,BT-ojossen/odoo,podemos-info/odoo,jfpla/odoo,luistorresm/odoo,Elico-Corp/odoo_OCB,joshuajan/odoo,apanju/GMIO_Odoo,fuselock/odoo,jiachenning/odoo,srsman/odoo,cysnake4713/odoo,guerrerocarlos/odoo,fossoult/odoo,jaxkodex/odoo,vrenaville/ngo-addons-backport,rubencabrera/odoo,wangjun/odoo,mlaitinen/odoo,fossoult/odoo,dgzurita/odoo,Dani
san/odoo-1,guewen/OpenUpgrade,ecosoft-odoo/odoo,florentx/OpenUpgrade,alexteodor/odoo,OSSESAC/odoopubarquiluz,vnsofthe/odoo,doomsterinc/odoo,PongPi/isl-odoo,florian-dacosta/OpenUpgrade,RafaelTorrealba/odoo,fossoult/odoo,apocalypsebg/odoo,aviciimaxwell/odoo,colinnewell/odoo,ThinkOpen-Solutions/odoo,cloud9UG/odoo,rdeheele/odoo,florentx/OpenUpgrade,FlorianLudwig/odoo,dalegregory/odoo,shaufi/odoo,BT-astauder/odoo,havt/odoo,pplatek/odoo,dgzurita/odoo,hubsaysnuaa/odoo,joariasl/odoo,cloud9UG/odoo,OpenPymeMx/OCB,ccomb/OpenUpgrade,0k/OpenUpgrade,OpenUpgrade/OpenUpgrade,minhtuancn/odoo,takis/odoo,sysadminmatmoz/OCB,florian-dacosta/OpenUpgrade,OpenPymeMx/OCB,dgzurita/odoo,x111ong/odoo,codekaki/odoo,abdellatifkarroum/odoo,Endika/OpenUpgrade,alhashash/odoo,abenzbiria/clients_odoo,oasiswork/odoo,mustafat/odoo-1,joshuajan/odoo,draugiskisprendimai/odoo,QianBIG/odoo,ujjwalwahi/odoo,nitinitprof/odoo,Drooids/odoo,vnsofthe/odoo,Bachaco-ve/odoo,ChanduERP/odoo,sergio-incaser/odoo,vrenaville/ngo-addons-backport,Eric-Zhong/odoo,storm-computers/odoo,abdellatifkarroum/odoo,shingonoide/odoo,waytai/odoo,jpshort/odoo,sadleader/odoo,ccomb/OpenUpgrade,hmen89/odoo,jusdng/odoo,QianBIG/odoo,stephen144/odoo,charbeljc/OCB,xujb/odoo,dgzurita/odoo,mszewczy/odoo,odoousers2014/odoo,wangjun/odoo,bkirui/odoo,kybriainfotech/iSocioCRM,ingadhoc/odoo,erkrishna9/odoo,ygol/odoo,tvtsoft/odoo8,aviciimaxwell/odoo,fuselock/odoo,datenbetrieb/odoo,x111ong/odoo,leoliujie/odoo,BT-rmartin/odoo,Ernesto99/odoo,hmen89/odoo,collex100/odoo,srsman/odoo,andreparames/odoo,factorlibre/OCB,Eric-Zhong/odoo,demon-ru/iml-crm,hopeall/odoo,colinnewell/odoo,goliveirab/odoo,BT-rmartin/odoo,OpenPymeMx/OCB,brijeshkesariya/odoo,cysnake4713/odoo,florian-dacosta/OpenUpgrade,Daniel-CA/odoo,christophlsa/odoo,apanju/odoo,sysadminmatmoz/OCB,ThinkOpen-Solutions/odoo,idncom/odoo,blaggacao/OpenUpgrade,tarzan0820/odoo,VielSoft/odoo,juanalfonsopr/odoo,kirca/OpenUpgrade,takis/odoo,BT-fgarbely/odoo,oasiswork/odoo,hoatle/odoo,virgree/odoo,cdrooom/odoo,spadae22/odoo,gavin-feng/odoo,microcom/odoo,Gitlab11/odoo,nhomar/odoo,incaser/odoo-odoo,draugiskisprendimai/odoo,hassoon3/odoo,christophlsa/odoo,nexiles/odoo,ujjwalwahi/odoo,codekaki/odoo,Eric-Zhong/odoo,0k/OpenUpgrade,jusdng/odoo,glovebx/odoo,prospwro/odoo,Nowheresly/odoo,matrixise/odoo,hassoon3/odoo,stephen144/odoo,ujjwalwahi/odoo,credativUK/OCB,tvtsoft/odoo8,mszewczy/odoo,vnsofthe/odoo,joshuajan/odoo,ujjwalwahi/odoo,alqfahad/odoo,nuncjo/odoo,sebalix/OpenUpgrade,omprakasha/odoo,sysadminmatmoz/OCB,dsfsdgsbngfggb/odoo,BT-ojossen/odoo,steedos/odoo,gsmartway/odoo,xujb/odoo,odootr/odoo,grap/OCB,bkirui/odoo,kirca/OpenUpgrade,gvb/odoo,mustafat/odoo-1,matrixise/odoo,Codefans-fan/odoo,podemos-info/odoo,erkrishna9/odoo,collex100/odoo,Bachaco-ve/odoo,shivam1111/odoo,gdgellatly/OCB1,frouty/odoogoeen,0k/odoo,fgesora/odoo,apanju/GMIO_Odoo,hopeall/odoo,goliveirab/odoo,joariasl/odoo,Eric-Zhong/odoo,draugiskisprendimai/odoo,hopeall/odoo,KontorConsulting/odoo,Elico-Corp/odoo_OCB,zchking/odoo,Gitlab11/odoo,luiseduardohdbackup/odoo,bobisme/odoo,pedrobaeza/odoo,PongPi/isl-odoo,addition-it-solutions/project-all,rschnapka/odoo,salaria/odoo,TRESCLOUD/odoopub,gavin-feng/odoo,sve-odoo/odoo,steedos/odoo,jusdng/odoo,gsmartway/odoo,poljeff/odoo,rowemoore/odoo,fevxie/odoo,joariasl/odoo,Grirrane/odoo,KontorConsulting/odoo,lombritz/odoo,podemos-info/odoo,virgree/odoo,camptocamp/ngo-addons-backport,gorjuce/odoo,rschnapka/odoo,oihane/odoo,Ichag/odoo,hip-odoo/odoo,nhomar/odoo-mirror,chiragjogi/odoo,ramitalat/odoo,odooindia/odoo,savoirfairelinux/OpenUpgrade,zchking/o
doo,pplatek/odoo,incaser/odoo-odoo,funkring/fdoo,kirca/OpenUpgrade,lsinfo/odoo,acshan/odoo,fdvarela/odoo8,oliverhr/odoo,abstract-open-solutions/OCB,damdam-s/OpenUpgrade,camptocamp/ngo-addons-backport,laslabs/odoo,brijeshkesariya/odoo,bealdav/OpenUpgrade,dalegregory/odoo,luistorresm/odoo,dezynetechnologies/odoo,Elico-Corp/odoo_OCB,fuhongliang/odoo,papouso/odoo,florentx/OpenUpgrade,kirca/OpenUpgrade,bguillot/OpenUpgrade,lombritz/odoo,markeTIC/OCB,oliverhr/odoo,tvtsoft/odoo8,Bachaco-ve/odoo,juanalfonsopr/odoo,salaria/odoo,credativUK/OCB,diagramsoftware/odoo,minhtuancn/odoo,inspyration/odoo,Noviat/odoo,aviciimaxwell/odoo,alhashash/odoo,lsinfo/odoo,SAM-IT-SA/odoo,CopeX/odoo,jfpla/odoo,lsinfo/odoo,dsfsdgsbngfggb/odoo,guewen/OpenUpgrade,Ernesto99/odoo,joariasl/odoo,Maspear/odoo,hbrunn/OpenUpgrade,Endika/odoo,hubsaysnuaa/odoo,fuhongliang/odoo,nagyistoce/odoo-dev-odoo,ubic135/odoo-design,feroda/odoo,Adel-Magebinary/odoo,waytai/odoo,pplatek/odoo,tinkerthaler/odoo,realsaiko/odoo,oliverhr/odoo,fuhongliang/odoo,JGarcia-Panach/odoo,Grirrane/odoo,lgscofield/odoo,ojengwa/odoo,rubencabrera/odoo,csrocha/OpenUpgrade,goliveirab/odoo,Eric-Zhong/odoo,bplancher/odoo,thanhacun/odoo,funkring/fdoo,cedk/odoo,luistorresm/odoo,poljeff/odoo,odooindia/odoo,collex100/odoo,ChanduERP/odoo,lightcn/odoo,srimai/odoo,OpusVL/odoo,ingadhoc/odoo,Grirrane/odoo,luistorresm/odoo,ramitalat/odoo,Antiun/odoo,wangjun/odoo,CubicERP/odoo,BT-fgarbely/odoo,goliveirab/odoo,slevenhagen/odoo,mmbtba/odoo,0k/odoo,tangyiyong/odoo,mvaled/OpenUpgrade,kifcaliph/odoo,jiachenning/odoo,makinacorpus/odoo,gvb/odoo,kittiu/odoo,shingonoide/odoo,dsfsdgsbngfggb/odoo,credativUK/OCB,jaxkodex/odoo,patmcb/odoo,fjbatresv/odoo,lombritz/odoo,Endika/OpenUpgrade,klunwebale/odoo,Daniel-CA/odoo,glovebx/odoo,QianBIG/odoo,n0m4dz/odoo,Gitlab11/odoo,diagramsoftware/odoo,hubsaysnuaa/odoo,srsman/odoo,hanicker/odoo,christophlsa/odoo,papouso/odoo,mvaled/OpenUpgrade,javierTerry/odoo,kittiu/odoo,guerrerocarlos/odoo,ehirt/odoo,acshan/odoo,savoirfairelinux/odoo,sv-dev1/odoo,ygol/odoo,grap/OpenUpgrade,ojengwa/odoo,jiangzhixiao/odoo,ApuliaSoftware/odoo,shivam1111/odoo,JCA-Developpement/Odoo,BT-ojossen/odoo,Elico-Corp/odoo_OCB,jusdng/odoo,dkubiak789/odoo,javierTerry/odoo,VielSoft/odoo,SAM-IT-SA/odoo,kybriainfotech/iSocioCRM,Danisan/odoo-1,jpshort/odoo,odoousers2014/odoo,syci/OCB,hopeall/odoo,jiangzhixiao/odoo,ihsanudin/odoo,fossoult/odoo,sadleader/odoo,ApuliaSoftware/odoo,idncom/odoo,tvibliani/odoo,virgree/odoo,luiseduardohdbackup/odoo,ingadhoc/odoo,savoirfairelinux/odoo,Codefans-fan/odoo,cysnake4713/odoo,fgesora/odoo,luistorresm/odoo,apocalypsebg/odoo,hubsaysnuaa/odoo,apanju/odoo,FlorianLudwig/odoo,slevenhagen/odoo,JonathanStein/odoo,ubic135/odoo-design,numerigraphe/odoo,odooindia/odoo,VitalPet/odoo,patmcb/odoo,windedge/odoo,ujjwalwahi/odoo,rdeheele/odoo,naousse/odoo,csrocha/OpenUpgrade,n0m4dz/odoo,fgesora/odoo,glovebx/odoo,Nick-OpusVL/odoo,acshan/odoo,incaser/odoo-odoo,rubencabrera/odoo,OSSESAC/odoopubarquiluz,deKupini/erp,JGarcia-Panach/odoo,oihane/odoo,diagramsoftware/odoo,charbeljc/OCB,mkieszek/odoo,abenzbiria/clients_odoo,n0m4dz/odoo,odoousers2014/odoo,apanju/GMIO_Odoo,idncom/odoo,massot/odoo,PongPi/isl-odoo,Ichag/odoo,VielSoft/odoo,christophlsa/odoo,codekaki/odoo,provaleks/o8,dezynetechnologies/odoo,grap/OCB,shivam1111/odoo,bobisme/odoo,takis/odoo,frouty/odoogoeen,AuyaJackie/odoo,nagyistoce/odoo-dev-odoo,charbeljc/OCB,joariasl/odoo,aviciimaxwell/odoo,joshuajan/odoo,Gitlab11/odoo,ygol/odoo,ramadhane/odoo,Nowheresly/odoo,Ichag/odoo,jiachenning/odoo,fgesora/odoo,grap/OpenUpgrade,g
orjuce/odoo,cpyou/odoo,havt/odoo,avoinsystems/odoo,nuncjo/odoo,microcom/odoo,wangjun/odoo,SAM-IT-SA/odoo,BT-fgarbely/odoo,xujb/odoo,ShineFan/odoo,AuyaJackie/odoo,simongoffin/website_version,slevenhagen/odoo,jusdng/odoo,ecosoft-odoo/odoo,Ernesto99/odoo,arthru/OpenUpgrade,simongoffin/website_version,sve-odoo/odoo,virgree/odoo,ccomb/OpenUpgrade,ihsanudin/odoo,sv-dev1/odoo,nuncjo/odoo,florian-dacosta/OpenUpgrade,lsinfo/odoo,AuyaJackie/odoo,kifcaliph/odoo,rahuldhote/odoo,srsman/odoo,SAM-IT-SA/odoo,tangyiyong/odoo,realsaiko/odoo,alexcuellar/odoo,hip-odoo/odoo,jaxkodex/odoo,BT-ojossen/odoo,hifly/OpenUpgrade,demon-ru/iml-crm,BT-astauder/odoo,jpshort/odoo,charbeljc/OCB,acshan/odoo,codekaki/odoo,nuncjo/odoo,nagyistoce/odoo-dev-odoo,mmbtba/odoo,OpusVL/odoo,shivam1111/odoo,odooindia/odoo,Eric-Zhong/odoo,savoirfairelinux/OpenUpgrade,CatsAndDogsbvba/odoo,frouty/odoo_oph,hubsaysnuaa/odoo,dllsf/odootest,ThinkOpen-Solutions/odoo,sinbazhou/odoo,frouty/odoogoeen,juanalfonsopr/odoo,pedrobaeza/odoo,jesramirez/odoo,bkirui/odoo,dgzurita/odoo,JCA-Developpement/Odoo,ehirt/odoo,prospwro/odoo,VitalPet/odoo,Antiun/odoo,ihsanudin/odoo,bobisme/odoo,ovnicraft/odoo,rahuldhote/odoo,QianBIG/odoo,GauravSahu/odoo,tinkhaven-organization/odoo,pedrobaeza/OpenUpgrade,factorlibre/OCB,Maspear/odoo,Noviat/odoo,cysnake4713/odoo,stonegithubs/odoo,kybriainfotech/iSocioCRM,sve-odoo/odoo,demon-ru/iml-crm,minhtuancn/odoo,nuuuboo/odoo,CopeX/odoo,acshan/odoo,fossoult/odoo,JGarcia-Panach/odoo,x111ong/odoo,abstract-open-solutions/OCB,windedge/odoo,jolevq/odoopub,ChanduERP/odoo,ramadhane/odoo,ihsanudin/odoo,laslabs/odoo,savoirfairelinux/OpenUpgrade,Nick-OpusVL/odoo,cpyou/odoo,Nick-OpusVL/odoo,grap/OpenUpgrade,glovebx/odoo,gsmartway/odoo,arthru/OpenUpgrade,omprakasha/odoo,ojengwa/odoo,draugiskisprendimai/odoo,oasiswork/odoo,inspyration/odoo,nhomar/odoo-mirror,realsaiko/odoo,sadleader/odoo,datenbetrieb/odoo,markeTIC/OCB,Grirrane/odoo,alexcuellar/odoo,Elico-Corp/odoo_OCB,Noviat/odoo,Adel-Magebinary/odoo,ojengwa/odoo,leoliujie/odoo,BT-ojossen/odoo,steedos/odoo,OSSESAC/odoopubarquiluz,mlaitinen/odoo,guerrerocarlos/odoo,omprakasha/odoo,alqfahad/odoo,vrenaville/ngo-addons-backport,cdrooom/odoo,grap/OCB,shaufi10/odoo,hifly/OpenUpgrade,cedk/odoo,virgree/odoo,fdvarela/odoo8,bguillot/OpenUpgrade,JGarcia-Panach/odoo,alexteodor/odoo,zchking/odoo,jfpla/odoo,podemos-info/odoo,jolevq/odoopub,Ichag/odoo,frouty/odoo_oph,mvaled/OpenUpgrade,ehirt/odoo,jesramirez/odoo,Grirrane/odoo,savoirfairelinux/OpenUpgrade,fossoult/odoo,provaleks/o8,bplancher/odoo,collex100/odoo,BT-rmartin/odoo,odoousers2014/odoo,hifly/OpenUpgrade,SAM-IT-SA/odoo,tinkerthaler/odoo,datenbetrieb/odoo,gavin-feng/odoo,GauravSahu/odoo,fjbatresv/odoo,hip-odoo/odoo,thanhacun/odoo,BT-ojossen/odoo,odoousers2014/odoo,codekaki/odoo,cloud9UG/odoo,tinkhaven-organization/odoo,ApuliaSoftware/odoo,MarcosCommunity/odoo,damdam-s/OpenUpgrade,nexiles/odoo,0k/odoo,gorjuce/odoo,n0m4dz/odoo,kittiu/odoo,synconics/odoo,srsman/odoo,Maspear/odoo,rgeleta/odoo,PongPi/isl-odoo,vrenaville/ngo-addons-backport,odootr/odoo,odootr/odoo,poljeff/odoo,fevxie/odoo,Kilhog/odoo,fossoult/odoo,VielSoft/odoo,hanicker/odoo,credativUK/OCB,grap/OCB,arthru/OpenUpgrade,abstract-open-solutions/OCB,Codefans-fan/odoo,simongoffin/website_version,shaufi10/odoo,mszewczy/odoo,sv-dev1/odoo,alexcuellar/odoo,lightcn/odoo,ovnicraft/odoo,ovnicraft/odoo,agrista/odoo-saas,rschnapka/odoo,alhashash/odoo,mustafat/odoo-1,kittiu/odoo,BT-fgarbely/odoo,sysadminmatmoz/OCB,steedos/odoo,ClearCorp-dev/odoo,mvaled/OpenUpgrade,leorochael/odoo,AuyaJackie/odoo,osvalr/odo
o,CatsAndDogsbvba/odoo,tarzan0820/odoo,mlaitinen/odoo,jeasoft/odoo,idncom/odoo,ramadhane/odoo,Codefans-fan/odoo,abstract-open-solutions/OCB,Endika/OpenUpgrade,camptocamp/ngo-addons-backport,andreparames/odoo,Daniel-CA/odoo,odoo-turkiye/odoo,bplancher/odoo,odootr/odoo,spadae22/odoo,bguillot/OpenUpgrade,funkring/fdoo,deKupini/erp,hubsaysnuaa/odoo,Codefans-fan/odoo,NL66278/OCB,KontorConsulting/odoo,lgscofield/odoo,janocat/odoo,oliverhr/odoo,stonegithubs/odoo,Daniel-CA/odoo,kifcaliph/odoo,frouty/odoogoeen,cloud9UG/odoo,gavin-feng/odoo,Endika/odoo,hanicker/odoo,OpenUpgrade/OpenUpgrade,makinacorpus/odoo,ClearCorp-dev/odoo,bguillot/OpenUpgrade,highco-groupe/odoo,synconics/odoo,ecosoft-odoo/odoo,grap/OpenUpgrade,VitalPet/odoo,tinkhaven-organization/odoo,CatsAndDogsbvba/odoo,pedrobaeza/OpenUpgrade,stonegithubs/odoo,andreparames/odoo,AuyaJackie/odoo,draugiskisprendimai/odoo,Endika/odoo,havt/odoo,Maspear/odoo,klunwebale/odoo,apanju/GMIO_Odoo,gvb/odoo,eino-makitalo/odoo,goliveirab/odoo,odoo-turkiye/odoo,xzYue/odoo,storm-computers/odoo,rowemoore/odoo,datenbetrieb/odoo,Codefans-fan/odoo,jfpla/odoo,Codefans-fan/odoo,chiragjogi/odoo,Endika/OpenUpgrade,elmerdpadilla/iv,x111ong/odoo,ccomb/OpenUpgrade,waytai/odoo,shaufi/odoo,CatsAndDogsbvba/odoo,dkubiak789/odoo,bplancher/odoo,bealdav/OpenUpgrade,jiangzhixiao/odoo,ojengwa/odoo,shingonoide/odoo,lightcn/odoo,slevenhagen/odoo,naousse/odoo,naousse/odoo,blaggacao/OpenUpgrade,damdam-s/OpenUpgrade,funkring/fdoo,omprakasha/odoo,fuhongliang/odoo,frouty/odoo_oph,nagyistoce/odoo-dev-odoo,SerpentCS/odoo,apanju/GMIO_Odoo,JCA-Developpement/Odoo,stephen144/odoo,agrista/odoo-saas,Elico-Corp/odoo_OCB,feroda/odoo,nuuuboo/odoo,dezynetechnologies/odoo,ovnicraft/odoo,x111ong/odoo,klunwebale/odoo,sinbazhou/odoo,bakhtout/odoo-educ,apanju/odoo,KontorConsulting/odoo,jeasoft/odoo,pplatek/odoo,luistorresm/odoo,mmbtba/odoo,TRESCLOUD/odoopub,hbrunn/OpenUpgrade,abdellatifkarroum/odoo,shaufi/odoo,shingonoide/odoo,srsman/odoo,rowemoore/odoo,OpenUpgrade-dev/OpenUpgrade,csrocha/OpenUpgrade,stephen144/odoo,GauravSahu/odoo,oihane/odoo,massot/odoo,ehirt/odoo,gorjuce/odoo,CatsAndDogsbvba/odoo,hifly/OpenUpgrade,SerpentCS/odoo,slevenhagen/odoo-npg,Antiun/odoo,agrista/odoo-saas,elmerdpadilla/iv,ehirt/odoo,zchking/odoo,bakhtout/odoo-educ,shaufi10/odoo
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
import membership_unpaid_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:[FIX] membership: Remove unpaid invoice import from init file
bzr revid: mra@mra-laptop-20101006072219-twq77xlem3d69rg7
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
import membership_unpaid_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<commit_msg>[FIX] membership: Remove unpaid invoice import from init file
bzr revid: mra@mra-laptop-20101006072219-twq77xlem3d69rg7<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
import membership_unpaid_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:[FIX] membership: Remove unpaid invoice import from init file
bzr revid: mra@mra-laptop-20101006072219-twq77xlem3d69rg7# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
import membership_unpaid_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<commit_msg>[FIX] membership: Remove unpaid invoice import from init file
bzr revid: mra@mra-laptop-20101006072219-twq77xlem3d69rg7<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import membership_invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ba7548816106c652b8c85a9aa703b7d40b5c2307
|
slave/skia_slave_scripts/valgrind_run_decoding_tests.py
|
slave/skia_slave_scripts/valgrind_run_decoding_tests.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia skimage executable. """
from build_step import BuildStep
from valgrind_build_step import ValgrindBuildStep
from run_decoding_tests import RunDecodingTests
import sys
class ValgrindRunDecodingTests(ValgrindBuildStep, RunDecodingTests):
pass
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ValgrindRunDecodingTests))
|
Add missing skimage script for valgrind
|
Add missing skimage script for valgrind
Unreviewed.
(RunBuilders:Test-Ubuntu12-ShuttleA-HD2000-x86_64-Release-Valgrind)
Review URL: https://codereview.chromium.org/18301004
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9894 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot
|
Add missing skimage script for valgrind
Unreviewed.
(RunBuilders:Test-Ubuntu12-ShuttleA-HD2000-x86_64-Release-Valgrind)
Review URL: https://codereview.chromium.org/18301004
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9894 2bbb7eff-a529-9590-31e7-b0007b416f81
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia skimage executable. """
from build_step import BuildStep
from valgrind_build_step import ValgrindBuildStep
from run_decoding_tests import RunDecodingTests
import sys
class ValgrindRunDecodingTests(ValgrindBuildStep, RunDecodingTests):
pass
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ValgrindRunDecodingTests))
|
<commit_before><commit_msg>Add missing skimage script for valgrind
Unreviewed.
(RunBuilders:Test-Ubuntu12-ShuttleA-HD2000-x86_64-Release-Valgrind)
Review URL: https://codereview.chromium.org/18301004
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9894 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia skimage executable. """
from build_step import BuildStep
from valgrind_build_step import ValgrindBuildStep
from run_decoding_tests import RunDecodingTests
import sys
class ValgrindRunDecodingTests(ValgrindBuildStep, RunDecodingTests):
pass
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ValgrindRunDecodingTests))
|
Add missing skimage script for valgrind
Unreviewed.
(RunBuilders:Test-Ubuntu12-ShuttleA-HD2000-x86_64-Release-Valgrind)
Review URL: https://codereview.chromium.org/18301004
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9894 2bbb7eff-a529-9590-31e7-b0007b416f81#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia skimage executable. """
from build_step import BuildStep
from valgrind_build_step import ValgrindBuildStep
from run_decoding_tests import RunDecodingTests
import sys
class ValgrindRunDecodingTests(ValgrindBuildStep, RunDecodingTests):
pass
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ValgrindRunDecodingTests))
|
<commit_before><commit_msg>Add missing skimage script for valgrind
Unreviewed.
(RunBuilders:Test-Ubuntu12-ShuttleA-HD2000-x86_64-Release-Valgrind)
Review URL: https://codereview.chromium.org/18301004
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9894 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia skimage executable. """
from build_step import BuildStep
from valgrind_build_step import ValgrindBuildStep
from run_decoding_tests import RunDecodingTests
import sys
class ValgrindRunDecodingTests(ValgrindBuildStep, RunDecodingTests):
pass
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(ValgrindRunDecodingTests))
|
|
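The new script contains no logic of its own: it leans on Python's method resolution order, letting the Valgrind build step override how a command is launched while the decoding step supplies what to run. A standalone sketch of that mixin pattern (class names and commands here are generic, not the buildbot's real API):

class BuildStep(object):
    def command(self):
        return ['true']
    def run(self):
        print(' '.join(self.command()))

class ValgrindStep(BuildStep):
    def command(self):
        # Prepend valgrind to whatever the concrete step would run.
        return ['valgrind'] + super(ValgrindStep, self).command()

class DecodingStep(BuildStep):
    def command(self):
        return ['skimage', '--readPath', 'images/']

class ValgrindDecodingStep(ValgrindStep, DecodingStep):
    pass

ValgrindDecodingStep().run()  # prints: valgrind skimage --readPath images/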
f5915cf24dc7e6fed96700e00a3bbc6ccb6bc552
|
core/utils.py
|
core/utils.py
|
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
|
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
if duration.index('.') == 0:
duration = '0' + duration
duration_split = duration.split('.')
        # Leading-dot inputs such as '.5' are normalized to '0.5' above.
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
|
Fix bug that caused error when nothing is before a decimal duration input
|
Fix bug that caused error when nothing is before a decimal duration input
|
Python
|
bsd-2-clause
|
cdubz/timestrap,overshard/timestrap,overshard/timestrap,cdubz/timestrap,muhleder/timestrap,cdubz/timestrap,overshard/timestrap,muhleder/timestrap,muhleder/timestrap
|
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
Fix bug that caused error when nothing is before a decimal duration input
|
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
if duration.index('.') == 0:
duration = '0' + duration
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
|
<commit_before>from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
<commit_msg>Fix bug that caused error when nothing is before a decimal duration input<commit_after>
|
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
if duration.index('.') == 0:
duration = '0' + duration
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
|
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
Fix bug that caused error when nothing is before a decimal duration input
from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
if duration.index('.') == 0:
duration = '0' + duration
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
|
<commit_before>from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
<commit_msg>Fix bug that caused error when nothing is before a decimal duration input<commit_after>from __future__ import division
from django.utils.duration import _get_duration_components
from datetime import timedelta
from decimal import Decimal
def parse_duration(duration):
hours = None
minutes = None
if duration.isdigit():
hours = int(duration)
elif ':' in duration:
duration_split = duration.split(':')
hours = int(duration_split[0])
minutes = int(duration_split[1])
elif '.' in duration:
if duration.index('.') == 0:
duration = '0' + duration
duration_split = duration.split('.')
# TODO: Fix error here when not appending a 0, ex .5 instead of 0.5
hours = int(duration_split[0])
minutes = int(60 * float('.' + duration_split[1]))
if minutes is None:
minutes = 0
if hours or minutes:
return timedelta(hours=hours, minutes=minutes)
else:
return None
def duration_string(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
string = '{}:{:02d}'.format(hours, minutes)
else:
string = '0:00'
return string
def duration_decimal(duration):
if duration is not None:
days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501
hours += days * 24
decimal = Decimal(hours) + Decimal(minutes/60)
else:
decimal = Decimal(0)
return decimal
|
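Editor's note: the timestrap record above fixes a crash on leading-dot input such as `.5`: before the change, `'.5'.split('.')` produced `['', '5']`, so `int('')` raised ValueError. A minimal sketch of the fixed behaviour, assuming `parse_duration` is importable from `core.utils` as named in the record:

```python
# Illustrative check of the fixed parse_duration -- assumes core.utils
# from the record above is on the import path.
from core.utils import parse_duration

# The fix prepends '0' when the string starts with '.', so '.5'
# parses the same as '0.5' instead of crashing on int('').
assert parse_duration('.5') == parse_duration('0.5')  # 30 minutes
assert parse_duration('1.25').seconds == 4500         # 1 h 15 min
assert parse_duration('2:30').seconds == 9000         # colon form unchanged
```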